openhands-tools 1.2.0.tar.gz → 1.7.3.tar.gz
This diff reflects the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/PKG-INFO +2 -1
- openhands_tools-1.7.3/openhands/tools/apply_patch/__init__.py +4 -0
- openhands_tools-1.7.3/openhands/tools/apply_patch/core.py +479 -0
- openhands_tools-1.7.3/openhands/tools/apply_patch/definition.py +181 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/browser_use/__init__.py +8 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/browser_use/definition.py +162 -7
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/browser_use/impl.py +113 -54
- openhands_tools-1.7.3/openhands/tools/browser_use/impl_windows.py +67 -0
- openhands_tools-1.7.3/openhands/tools/browser_use/logging_fix.py +65 -0
- openhands_tools-1.7.3/openhands/tools/browser_use/server.py +190 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/delegate/__init__.py +2 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/delegate/definition.py +29 -26
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/delegate/impl.py +47 -23
- openhands_tools-1.7.3/openhands/tools/delegate/registration.py +135 -0
- openhands_tools-1.7.3/openhands/tools/delegate/templates/delegate_tool_description.j2 +26 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/file_editor/editor.py +7 -1
- openhands_tools-1.7.3/openhands/tools/gemini/__init__.py +92 -0
- openhands_tools-1.7.3/openhands/tools/gemini/edit/__init__.py +15 -0
- openhands_tools-1.7.3/openhands/tools/gemini/edit/definition.py +173 -0
- openhands_tools-1.7.3/openhands/tools/gemini/edit/impl.py +187 -0
- openhands_tools-1.7.3/openhands/tools/gemini/list_directory/__init__.py +17 -0
- openhands_tools-1.7.3/openhands/tools/gemini/list_directory/definition.py +183 -0
- openhands_tools-1.7.3/openhands/tools/gemini/list_directory/impl.py +165 -0
- openhands_tools-1.7.3/openhands/tools/gemini/read_file/__init__.py +15 -0
- openhands_tools-1.7.3/openhands/tools/gemini/read_file/definition.py +148 -0
- openhands_tools-1.7.3/openhands/tools/gemini/read_file/impl.py +153 -0
- openhands_tools-1.7.3/openhands/tools/gemini/write_file/__init__.py +15 -0
- openhands_tools-1.7.3/openhands/tools/gemini/write_file/definition.py +140 -0
- openhands_tools-1.7.3/openhands/tools/gemini/write_file/impl.py +96 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/preset/__init__.py +9 -1
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/preset/default.py +0 -3
- openhands_tools-1.7.3/openhands/tools/preset/gemini.py +104 -0
- openhands_tools-1.7.3/openhands/tools/preset/gpt5.py +75 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/preset/planning.py +0 -1
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/task_tracker/__init__.py +2 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/task_tracker/definition.py +4 -1
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/terminal/__init__.py +6 -7
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/terminal/definition.py +24 -11
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/terminal/impl.py +25 -15
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/terminal/terminal/interface.py +4 -4
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/terminal/terminal/subprocess_terminal.py +3 -13
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/terminal/terminal/terminal_session.py +17 -13
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/terminal/terminal/tmux_terminal.py +9 -9
- openhands_tools-1.7.3/openhands/tools/tom_consult/__init__.py +24 -0
- openhands_tools-1.7.3/openhands/tools/tom_consult/definition.py +253 -0
- openhands_tools-1.7.3/openhands/tools/tom_consult/executor.py +425 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands_tools.egg-info/PKG-INFO +2 -1
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands_tools.egg-info/SOURCES.txt +50 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands_tools.egg-info/requires.txt +1 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/pyproject.toml +6 -2
- openhands_tools-1.2.0/openhands/tools/browser_use/server.py +0 -100
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/__init__.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/delegate/visualizer.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/file_editor/__init__.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/file_editor/definition.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/file_editor/exceptions.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/file_editor/impl.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/file_editor/utils/__init__.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/file_editor/utils/config.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/file_editor/utils/constants.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/file_editor/utils/diff.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/file_editor/utils/encoding.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/file_editor/utils/file_cache.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/file_editor/utils/history.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/file_editor/utils/shell.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/glob/__init__.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/glob/definition.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/glob/impl.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/grep/__init__.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/grep/definition.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/grep/impl.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/planning_file_editor/__init__.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/planning_file_editor/definition.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/planning_file_editor/impl.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/py.typed +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/terminal/constants.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/terminal/metadata.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/terminal/terminal/__init__.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/terminal/terminal/factory.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/terminal/utils/command.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/utils/__init__.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands/tools/utils/timeout.py +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands_tools.egg-info/dependency_links.txt +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/openhands_tools.egg-info/top_level.txt +0 -0
- {openhands_tools-1.2.0 → openhands_tools-1.7.3}/setup.cfg +0 -0
{openhands_tools-1.2.0 → openhands_tools-1.7.3}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: openhands-tools
-Version: 1.2.0
+Version: 1.7.3
 Summary: OpenHands Tools - Runtime tools for AI agents
 Requires-Python: >=3.12
 Requires-Dist: openhands-sdk
@@ -11,3 +11,4 @@ Requires-Dist: libtmux>=0.46.2
 Requires-Dist: pydantic>=2.11.7
 Requires-Dist: browser-use>=0.8.0
 Requires-Dist: func-timeout>=4.3.5
+Requires-Dist: tom-swe>=1.0.3
openhands_tools-1.7.3/openhands/tools/apply_patch/core.py
@@ -0,0 +1,479 @@
+"""Core logic for applying 'apply_patch' text format (OpenAI GPT-5.1 guide).
+
+This module is an adaptation of the reference implementation from
+https://github.com/openai/openai-cookbook/blob/main/examples/gpt-5/apply_patch.py
+and provides pure functions and data models to parse and apply patches.
+
+Minimal modifications were made to fit within the OpenHands SDK tool ecosystem:
+- Types exposed here are used by the ApplyPatch tool executor
+- File I/O is injected via callables so the executor can enforce workspace safety
+"""
+
+from __future__ import annotations
+
+from collections.abc import Callable
+from enum import Enum
+
+from pydantic import BaseModel, Field
+
+
+class ActionType(str, Enum):
+    ADD = "add"
+    DELETE = "delete"
+    UPDATE = "update"
+
+
+class FileChange(BaseModel):
+    type: ActionType
+    old_content: str | None = None
+    new_content: str | None = None
+    move_path: str | None = None
+
+
+class Commit(BaseModel):
+    changes: dict[str, FileChange] = Field(default_factory=dict)
+
+
+def assemble_changes(
+    orig: dict[str, str | None], dest: dict[str, str | None]
+) -> Commit:
+    commit = Commit()
+    for path in sorted(set(orig.keys()).union(dest.keys())):
+        old_content = orig.get(path)
+        new_content = dest.get(path)
+        if old_content != new_content:
+            if old_content is not None and new_content is not None:
+                commit.changes[path] = FileChange(
+                    type=ActionType.UPDATE,
+                    old_content=old_content,
+                    new_content=new_content,
+                )
+            elif new_content:
+                commit.changes[path] = FileChange(
+                    type=ActionType.ADD,
+                    new_content=new_content,
+                )
+            elif old_content:
+                commit.changes[path] = FileChange(
+                    type=ActionType.DELETE,
+                    old_content=old_content,
+                )
+            else:
+                assert False
+    return commit
+
+
+class Chunk(BaseModel):
+    orig_index: int = -1  # line index of the first line in the original file
+    del_lines: list[str] = Field(default_factory=list)
+    ins_lines: list[str] = Field(default_factory=list)
+
+
+class PatchAction(BaseModel):
+    type: ActionType
+    new_file: str | None = None
+    chunks: list[Chunk] = Field(default_factory=list)
+    move_path: str | None = None
+
+
+class Patch(BaseModel):
+    actions: dict[str, PatchAction] = Field(default_factory=dict)
+
+
+class Parser(BaseModel):
+    current_files: dict[str, str] = Field(default_factory=dict)
+    lines: list[str] = Field(default_factory=list)
+    index: int = 0
+    patch: Patch = Field(default_factory=Patch)
+    fuzz: int = 0
+
+    def is_done(self, prefixes: tuple[str, ...] | None = None) -> bool:
+        if self.index >= len(self.lines):
+            return True
+        if prefixes and self.lines[self.index].startswith(prefixes):
+            return True
+        return False
+
+    def startswith(self, prefix: str | tuple[str, ...]) -> bool:
+        assert self.index < len(self.lines), f"Index: {self.index} >= {len(self.lines)}"
+        if self.lines[self.index].startswith(prefix):
+            return True
+        return False
+
+    def read_str(self, prefix: str = "", return_everything: bool = False) -> str:
+        assert self.index < len(self.lines), f"Index: {self.index} >= {len(self.lines)}"
+        line = self.lines[self.index]
+        if line.startswith(prefix):
+            text = line if return_everything else line[len(prefix) :]
+            self.index += 1
+            return text
+        return ""
+
+    def parse(self):
+        while not self.is_done(("*** End Patch",)):
+            path = self.read_str("*** Update File: ")
+            if path:
+                if path in self.patch.actions:
+                    raise DiffError(f"Update File Error: Duplicate Path: {path}")
+                move_to = self.read_str("*** Move to: ")
+                if path not in self.current_files:
+                    raise DiffError(f"Update File Error: Missing File: {path}")
+                text = self.current_files[path]
+                action = self.parse_update_file(text)
+                # TODO: Check move_to is valid
+                action.move_path = move_to
+                self.patch.actions[path] = action
+                continue
+            path = self.read_str("*** Delete File: ")
+            if path:
+                if path in self.patch.actions:
+                    raise DiffError(f"Delete File Error: Duplicate Path: {path}")
+                if path not in self.current_files:
+                    raise DiffError(f"Delete File Error: Missing File: {path}")
+                self.patch.actions[path] = PatchAction(
+                    type=ActionType.DELETE,
+                )
+                continue
+            path = self.read_str("*** Add File: ")
+            if path:
+                if path in self.patch.actions:
+                    raise DiffError(f"Add File Error: Duplicate Path: {path}")
+                self.patch.actions[path] = self.parse_add_file()
+                continue
+            raise DiffError(f"Unknown Line: {self.lines[self.index]}")
+        if not self.startswith(("*** End Patch",)):
+            raise DiffError("Missing End Patch")
+        self.index += 1
+
+    def parse_update_file(self, text: str) -> PatchAction:
+        action = PatchAction(
+            type=ActionType.UPDATE,
+        )
+        lines = text.split("\n")
+        index = 0
+        while not self.is_done(
+            (
+                "*** End Patch",
+                "*** Update File:",
+                "*** Delete File:",
+                "*** Add File:",
+                "*** End of File",
+            )
+        ):
+            def_str = self.read_str("@@ ")
+            section_str = ""
+            if not def_str:
+                if self.lines[self.index] == "@@":
+                    section_str = self.lines[self.index]
+                    self.index += 1
+            if not (def_str or section_str or index == 0):
+                raise DiffError(f"Invalid Line:\n{self.lines[self.index]}")
+            if def_str.strip():
+                found = False
+                if not [s for s in lines[:index] if s == def_str]:
+                    for i, s in enumerate(lines[index:], index):
+                        if s == def_str:
+                            index = i + 1
+                            found = True
+                            break
+                if not found and not [
+                    s for s in lines[:index] if s.strip() == def_str.strip()
+                ]:
+                    for i, s in enumerate(lines[index:], index):
+                        if s.strip() == def_str.strip():
+                            index = i + 1
+                            self.fuzz += 1
+                            found = True
+                            break
+            next_chunk_context, chunks, end_patch_index, eof = peek_next_section(
+                self.lines, self.index
+            )
+            next_chunk_text = "\n".join(next_chunk_context)
+            new_index, fuzz = find_context(lines, next_chunk_context, index, eof)
+            if new_index == -1:
+                if eof:
+                    raise DiffError(f"Invalid EOF Context {index}:\n{next_chunk_text}")
+                else:
+                    raise DiffError(f"Invalid Context {index}:\n{next_chunk_text}")
+            self.fuzz += fuzz
+            for ch in chunks:
+                ch.orig_index += new_index
+                action.chunks.append(ch)
+            index = new_index + len(next_chunk_context)
+            self.index = end_patch_index
+            continue
+        return action
+
+    def parse_add_file(self) -> PatchAction:
+        lines = []
+        while not self.is_done(
+            ("*** End Patch", "*** Update File:", "*** Delete File:", "*** Add File:")
+        ):
+            s = self.read_str()
+            if not s.startswith("+"):
+                raise DiffError(f"Invalid Add File Line: {s}")
+            s = s[1:]
+            lines.append(s)
+        return PatchAction(
+            type=ActionType.ADD,
+            new_file="\n".join(lines),
+        )
+
+
+def find_context_core(
+    lines: list[str], context: list[str], start: int
+) -> tuple[int, int]:
+    if not context:
+        return start, 0
+
+    for i in range(start, len(lines)):
+        if lines[i : i + len(context)] == context:
+            return i, 0
+    for i in range(start, len(lines)):
+        if [s.rstrip() for s in lines[i : i + len(context)]] == [
+            s.rstrip() for s in context
+        ]:
+            return i, 1
+    for i in range(start, len(lines)):
+        if [s.strip() for s in lines[i : i + len(context)]] == [
+            s.strip() for s in context
+        ]:
+            return i, 100
+    return -1, 0
+
+
+def find_context(
+    lines: list[str], context: list[str], start: int, eof: bool
+) -> tuple[int, int]:
+    if eof:
+        new_index, fuzz = find_context_core(lines, context, len(lines) - len(context))
+        if new_index != -1:
+            return new_index, fuzz
+        new_index, fuzz = find_context_core(lines, context, start)
+        return new_index, fuzz + 10000
+    return find_context_core(lines, context, start)
+
+
+def peek_next_section(
+    lines: list[str], index: int
+) -> tuple[list[str], list[Chunk], int, bool]:
+    old: list[str] = []
+    del_lines: list[str] = []
+    ins_lines: list[str] = []
+    chunks: list[Chunk] = []
+    mode = "keep"
+    orig_index = index
+    while index < len(lines):
+        s = lines[index]
+        if s.startswith(
+            (
+                "@@",
+                "*** End Patch",
+                "*** Update File:",
+                "*** Delete File:",
+                "*** Add File:",
+                "*** End of File",
+            )
+        ):
+            break
+        if s == "***":
+            break
+        elif s.startswith("***"):
+            raise DiffError(f"Invalid Line: {s}")
+        index += 1
+        last_mode = mode
+        if s == "":
+            s = " "
+        if s[0] == "+":
+            mode = "add"
+        elif s[0] == "-":
+            mode = "delete"
+        elif s[0] == " ":
+            mode = "keep"
+        else:
+            raise DiffError(f"Invalid Line: {s}")
+        s = s[1:]
+        if mode == "keep" and last_mode != mode:
+            if ins_lines or del_lines:
+                chunks.append(
+                    Chunk(
+                        orig_index=len(old) - len(del_lines),
+                        del_lines=del_lines,
+                        ins_lines=ins_lines,
+                    )
+                )
+            del_lines = []
+            ins_lines = []
+        if mode == "delete":
+            del_lines.append(s)
+            old.append(s)
+        elif mode == "add":
+            ins_lines.append(s)
+        elif mode == "keep":
+            old.append(s)
+    if ins_lines or del_lines:
+        chunks.append(
+            Chunk(
+                orig_index=len(old) - len(del_lines),
+                del_lines=del_lines,
+                ins_lines=ins_lines,
+            )
+        )
+        del_lines = []
+        ins_lines = []
+    if index < len(lines) and lines[index] == "*** End of File":
+        index += 1
+        return old, chunks, index, True
+    if index == orig_index:
+        raise DiffError(f"Nothing in this section - index={index} {lines[index]}")
+    return old, chunks, index, False
+
+
+def text_to_patch(text: str, orig: dict[str, str]) -> tuple[Patch, int]:
+    lines = text.strip().split("\n")
+    if (
+        len(lines) < 2
+        or not lines[0].startswith("*** Begin Patch")
+        or lines[-1] != "*** End Patch"
+    ):
+        raise DiffError("Invalid patch text")
+
+    parser = Parser(
+        current_files=orig,
+        lines=lines,
+        index=1,
+    )
+    parser.parse()
+    return parser.patch, parser.fuzz
+
+
+def identify_files_needed(text: str) -> list[str]:
+    lines = text.strip().split("\n")
+    result = set()
+    for line in lines:
+        if line.startswith("*** Update File: "):
+            result.add(line[len("*** Update File: ") :])
+        if line.startswith("*** Delete File: "):
+            result.add(line[len("*** Delete File: ") :])
+    return list(result)
+
+
+def _get_updated_file(text: str, action: PatchAction, path: str) -> str:
+    assert action.type == ActionType.UPDATE
+    orig_lines = text.split("\n")
+    dest_lines = []
+    orig_index = 0
+    dest_index = 0
+    for chunk in action.chunks:
+        if chunk.orig_index > len(orig_lines):
+            raise DiffError(
+                f"_get_updated_file: {path}: chunk.orig_index {chunk.orig_index} > "
+                f"len(lines) {len(orig_lines)}"
+            )
+        if orig_index > chunk.orig_index:
+            raise DiffError(
+                f"_get_updated_file: {path}: orig_index {orig_index} > "
+                f"chunk.orig_index {chunk.orig_index}"
+            )
+        assert orig_index <= chunk.orig_index
+        dest_lines.extend(orig_lines[orig_index : chunk.orig_index])
+        delta = chunk.orig_index - orig_index
+        orig_index += delta
+        dest_index += delta
+        if chunk.ins_lines:
+            for s in chunk.ins_lines:
+                dest_lines.append(s)
+            dest_index += len(chunk.ins_lines)
+        orig_index += len(chunk.del_lines)
+    dest_lines.extend(orig_lines[orig_index:])
+    delta = len(orig_lines) - orig_index
+    orig_index += delta
+    dest_index += delta
+    assert orig_index == len(orig_lines)
+    assert dest_index == len(dest_lines)
+    return "\n".join(dest_lines)
+
+
+def patch_to_commit(patch: Patch, orig: dict[str, str]) -> Commit:
+    commit = Commit()
+    for path, action in patch.actions.items():
+        if action.type == ActionType.DELETE:
+            commit.changes[path] = FileChange(
+                type=ActionType.DELETE, old_content=orig[path]
+            )
+        elif action.type == ActionType.ADD:
+            commit.changes[path] = FileChange(
+                type=ActionType.ADD, new_content=action.new_file
+            )
+        elif action.type == ActionType.UPDATE:
+            new_content = _get_updated_file(text=orig[path], action=action, path=path)
+            commit.changes[path] = FileChange(
+                type=ActionType.UPDATE,
+                old_content=orig[path],
+                new_content=new_content,
+                move_path=action.move_path,
+            )
+    return commit
+
+
+class DiffError(ValueError):
+    """Raised for invalid or malformed patch text."""
+
+
+def load_files(paths: list[str], open_fn: Callable[[str], str]) -> dict[str, str]:
+    """Load original file contents used as the patch base.
+
+    This wraps the reference implementation's behavior from the OpenAI
+    cookbook apply_patch.py, but converts missing files into DiffError so
+    callers can surface a structured tool error instead of FileNotFoundError.
+    See:
+    https://github.com/openai/openai-cookbook/blob/main/examples/gpt-5/apply_patch.py
+    """
+    orig: dict[str, str] = {}
+    for path in paths:
+        try:
+            orig[path] = open_fn(path)
+        except (
+            FileNotFoundError
+        ) as exc:  # pragma: no cover - exercised via higher-level tests
+            raise DiffError(f"Delete File Error: Missing File: {path}") from exc
+    return orig
+
+
+def apply_commit(
+    commit: Commit,
+    write_fn: Callable[[str, str], None],
+    remove_fn: Callable[[str], None],
+) -> None:
+    for path, change in commit.changes.items():
+        if change.type == ActionType.DELETE:
+            remove_fn(path)
+        elif change.type == ActionType.ADD:
+            assert change.new_content is not None
+            write_fn(path, change.new_content)
+        elif change.type == ActionType.UPDATE:
+            assert change.new_content is not None
+            if change.move_path:
+                write_fn(change.move_path, change.new_content)
+                remove_fn(path)
+            else:
+                write_fn(path, change.new_content)
+
+
+def process_patch(
+    text: str,
+    open_fn: Callable[[str], str],
+    write_fn: Callable[[str, str], None],
+    remove_fn: Callable[[str], None],
+) -> tuple[str, int, Commit]:
+    """Process a patch string and apply it via provided I/O callables.
+
+    Returns (message, fuzz, commit)
+    """
+    assert text.startswith("*** Begin Patch")
+    paths = identify_files_needed(text)
+    orig = load_files(paths, open_fn)
+    patch, fuzz = text_to_patch(text, orig)
+    commit = patch_to_commit(patch, orig)
+    apply_commit(commit, write_fn, remove_fn)
+    return "Done!", fuzz, commit
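The module above parses the cookbook's envelope format (`*** Begin Patch` / `*** End Patch`, with `*** Update File:`, `*** Add File:`, `*** Delete File:` sections and `@@` context markers) and applies it through injected I/O callables, so `process_patch` can be exercised entirely in memory. A minimal sketch follows; the import path is inferred from the package layout listed above, and the file name and patch content are purely illustrative:

```python
# In-memory use of process_patch: dict-backed callables stand in for real file I/O.
from openhands.tools.apply_patch.core import process_patch

files = {"hello.py": 'print("hello")\n'}  # illustrative workspace content

patch = """*** Begin Patch
*** Update File: hello.py
@@
-print("hello")
+print("hello, world")
*** End Patch"""

msg, fuzz, commit = process_patch(
    text=patch,
    open_fn=lambda path: files[path],                      # read original content
    write_fn=lambda path, text: files.update({path: text}),  # store updated content
    remove_fn=lambda path: files.pop(path),                # delete a file
)
print(msg, fuzz)          # -> Done! 0
print(files["hello.py"])  # -> print("hello, world")
```

The same `open_fn`/`write_fn`/`remove_fn` seams are what `ApplyPatchExecutor` in definition.py (below) uses to keep all filesystem access inside the workspace root.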
openhands_tools-1.7.3/openhands/tools/apply_patch/definition.py
@@ -0,0 +1,181 @@
+"""ApplyPatch ToolDefinition and executor integrating the cookbook implementation."""
+
+from __future__ import annotations
+
+from collections.abc import Sequence
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from pydantic import Field
+
+from openhands.sdk.tool import (
+    Action,
+    Observation,
+    ToolAnnotations,
+    ToolDefinition,
+    ToolExecutor,
+    register_tool,
+)
+from openhands.sdk.tool.tool import FunctionToolParam
+
+from .core import Commit, DiffError, process_patch
+
+
+if TYPE_CHECKING:
+    from openhands.sdk.conversation.state import ConversationState
+
+
+class ApplyPatchAction(Action):
+    """Tool action schema specifying the patch to apply.
+
+    The patch must follow the exact text format described in the OpenAI
+    Cookbook's GPT-5.1 prompting guide. The executor parses this patch and
+    applies changes relative to the current workspace root.
+    """
+
+    patch: str = Field(
+        description=(
+            "Patch content following the '*** Begin Patch' ... '*** End Patch' "
+            "format as described in OpenAI GPT-5.1 prompting guide."
+        ),
+    )
+
+
+class ApplyPatchObservation(Observation):
+    """Result of applying a patch.
+
+    - message: human-readable summary of the changes or error
+    - fuzz: number of lines of fuzz used when applying hunks (0 means exact)
+    - commit: structured summary of the applied operations
+    """
+
+    message: str = ""
+    fuzz: int = 0
+    commit: Commit | None = None
+
+
+class ApplyPatchExecutor(ToolExecutor[ApplyPatchAction, ApplyPatchObservation]):
+    """Executor that applies unified text patches within the workspace.
+
+    Uses the pure functions in core.py for parsing and applying patches. All
+    filesystem access is constrained to the agent's workspace_root.
+    """
+
+    def __init__(self, workspace_root: str):
+        """Initialize executor with a workspace root.
+
+        Args:
+            workspace_root: Base directory relative to which all patch paths are
+                resolved. Absolute or path-escaping references are rejected.
+        """
+        self.workspace_root = Path(workspace_root).resolve()
+
+    def _resolve_path(self, p: str) -> Path:
+        """Resolve a file path into the workspace, disallowing escapes."""
+        pth = (
+            (self.workspace_root / p).resolve()
+            if not p.startswith("/")
+            else Path(p).resolve()
+        )
+        if not str(pth).startswith(str(self.workspace_root)):
+            raise DiffError("Absolute or escaping paths are not allowed")
+        return pth
+
+    def __call__(
+        self,
+        action: ApplyPatchAction,
+        conversation=None,  # noqa: ARG002 - signature match
+    ) -> ApplyPatchObservation:
+        """Execute the patch application and return an observation."""
+
+        def open_file(path: str) -> str:
+            fp = self._resolve_path(path)
+            with open(fp, encoding="utf-8") as f:
+                return f.read()
+
+        def write_file(path: str, content: str) -> None:
+            fp = self._resolve_path(path)
+            fp.parent.mkdir(parents=True, exist_ok=True)
+            with open(fp, "w", encoding="utf-8") as f:
+                f.write(content)
+
+        def remove_file(path: str) -> None:
+            fp = self._resolve_path(path)
+            fp.unlink(missing_ok=False)
+
+        try:
+            msg, fuzz, commit = process_patch(
+                action.patch, open_file, write_file, remove_file
+            )
+            # Include a human-readable summary in content so Responses API sees
+            # a function_call_output payload paired with the function_call.
+            obs = ApplyPatchObservation(message=msg, fuzz=fuzz, commit=commit)
+            if msg:
+                # Use Observation.from_text to populate content field correctly
+                obs = ApplyPatchObservation.from_text(
+                    text=msg, message=msg, fuzz=fuzz, commit=commit, is_error=False
+                )
+            return obs
+        except DiffError as e:
+            return ApplyPatchObservation.from_text(text=str(e), is_error=True)
+
+
+_DESCRIPTION = (
+    "Apply unified text patches to files in the workspace. "
+    "Input must start with '*** Begin Patch' and end with '*** End Patch'."
+)
+
+
+class ApplyPatchTool(ToolDefinition[ApplyPatchAction, ApplyPatchObservation]):
+    """ToolDefinition for applying unified text patches.
+
+    Creates an ApplyPatchExecutor bound to the current workspace and supplies a
+    concise description. The Responses tool schema is minimized to rely on
+    provider-known behavior for GPT-5.1 models.
+    """
+
+    @classmethod
+    def create(cls, conv_state: ConversationState) -> Sequence[ApplyPatchTool]:
+        """Initialize the tool for the active conversation state."""
+        executor = ApplyPatchExecutor(workspace_root=conv_state.workspace.working_dir)
+        return [
+            cls(
+                description=_DESCRIPTION,
+                action_type=ApplyPatchAction,
+                observation_type=ApplyPatchObservation,
+                annotations=ToolAnnotations(
+                    title="apply_patch",
+                    readOnlyHint=False,
+                    destructiveHint=True,
+                    idempotentHint=False,
+                    openWorldHint=False,
+                ),
+                executor=executor,
+            )
+        ]
+
+    # For OpenAI Responses API with GPT-5.1 models, the tool is server-known.
+    # Return a minimal function spec so the provider wires its own definition.
+    def to_responses_tool(
+        self,
+        add_security_risk_prediction: bool = False,  # noqa: ARG002 - signature match
+        action_type: type | None = None,  # noqa: ARG002 - signature match
+    ) -> FunctionToolParam:  # type: ignore[override]
+        """Serialize to OpenAI Responses function tool spec.
+
+        GPT-5.1 tools are known server-side. We return a minimal schema to ensure
+        the model includes the canonical 'patch' argument when calling this tool.
+        """
+        return {
+            "type": "function",
+            "name": self.name,
+            "parameters": {
+                "type": "object",
+                "properties": {"patch": {"type": "string"}},
+                "required": ["patch"],
+            },
+            "strict": False,
+        }  # type: ignore[return-value]
+
+
+register_tool(ApplyPatchTool.name, ApplyPatchTool)
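In normal use the tool is constructed via `ApplyPatchTool.create(conv_state)` as registered above; the hedged sketch below instead drives the executor directly, outside an agent loop, assuming openhands-tools 1.7.3 is installed. The workspace path and patch content are made up for illustration.

```python
# Direct use of ApplyPatchExecutor: all paths are resolved inside workspace_root,
# and escaping paths raise DiffError, which surfaces as an error observation.
from openhands.tools.apply_patch.definition import (
    ApplyPatchAction,
    ApplyPatchExecutor,
)

executor = ApplyPatchExecutor(workspace_root="/tmp/apply-patch-demo")

action = ApplyPatchAction(
    patch=(
        "*** Begin Patch\n"
        "*** Add File: notes/todo.md\n"
        "+# TODO\n"
        "+- try the apply_patch tool\n"
        "*** End Patch"
    )
)

obs = executor(action)        # writes /tmp/apply-patch-demo/notes/todo.md
print(obs.message, obs.fuzz)  # -> Done! 0
```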