wcgw 2.6.3__py3-none-any.whl → 2.7.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- wcgw/client/anthropic_client.py +59 -51
- wcgw/client/diff-instructions.txt +0 -1
- wcgw/client/file_ops/diff_edit.py +482 -0
- wcgw/client/file_ops/search_replace.py +119 -0
- wcgw/client/mcp_server/server.py +45 -2
- wcgw/client/memory.py +78 -0
- wcgw/client/openai_client.py +38 -18
- wcgw/client/tools.py +83 -169
- wcgw/relay/serve.py +41 -12
- wcgw/types_.py +8 -0
- {wcgw-2.6.3.dist-info → wcgw-2.7.1.dist-info}/METADATA +14 -3
- {wcgw-2.6.3.dist-info → wcgw-2.7.1.dist-info}/RECORD +15 -12
- {wcgw-2.6.3.dist-info → wcgw-2.7.1.dist-info}/WHEEL +0 -0
- {wcgw-2.6.3.dist-info → wcgw-2.7.1.dist-info}/entry_points.txt +0 -0
- {wcgw-2.6.3.dist-info → wcgw-2.7.1.dist-info}/licenses/LICENSE +0 -0
wcgw/client/tools.py
CHANGED
@@ -1,5 +1,6 @@
 import base64
 import datetime
+import glob
 import importlib.metadata
 import json
 import mimetypes
@@ -9,7 +10,7 @@ import shlex
 import time
 import traceback
 import uuid
-from
+from os.path import expanduser
 from pathlib import Path
 from tempfile import NamedTemporaryFile, TemporaryDirectory
 from typing import (
@@ -38,6 +39,7 @@ from websockets.sync.client import connect as syncconnect
 from ..types_ import (
     BashCommand,
     BashInteraction,
+    ContextSave,
     FileEdit,
     FileEditFindReplace,
     GetScreenInfo,
@@ -51,6 +53,8 @@ from ..types_ import (
     WriteIfEmpty,
 )
 from .computer_use import run_computer_tool
+from .file_ops.search_replace import search_replace_edit
+from .memory import load_memory, save_memory
 from .repo_ops.repo_context import get_repo_context
 from .sys_utils import command_run
 
@@ -290,12 +294,34 @@ BASH_STATE = BashState()
 
 
 def initialize(
-    any_workspace_path: str,
+    any_workspace_path: str,
+    read_files_: list[str],
+    task_id_to_resume: str,
+    max_tokens: Optional[int],
 ) -> str:
     reset_shell()
 
+    # Expand the workspace path
+    any_workspace_path = expand_user(any_workspace_path, BASH_STATE.is_in_docker)
     repo_context = ""
 
+    memory = ""
+    if task_id_to_resume:
+        try:
+            project_root_path, task_mem = load_memory(
+                task_id_to_resume,
+                max_tokens,
+                lambda x: default_enc.encode(x).ids,
+                lambda x: default_enc.decode(x),
+            )
+            memory = "Following is the retrieved task:\n" + task_mem
+            if (
+                not any_workspace_path or not os.path.exists(any_workspace_path)
+            ) and os.path.exists(project_root_path):
+                any_workspace_path = project_root_path
+        except Exception:
+            memory = f'Error: Unable to load task with ID "{task_id_to_resume}" '
+
     if any_workspace_path:
         if os.path.exists(any_workspace_path):
             repo_context, folder_to_start = get_repo_context(any_workspace_path, 200)
@@ -306,7 +332,9 @@ def initialize(
 
             repo_context = f"---\n# Workspace structure\n{repo_context}\n---\n"
         else:
-
+            repo_context = (
+                f"\nInfo: Workspace path {any_workspace_path} does not exist\n"
+            )
 
     initial_files_context = ""
     if read_files_:
@@ -325,6 +353,10 @@ Current working directory: {BASH_STATE.cwd}
 {repo_context}
 
 {initial_files_context}
+
+---
+
+{memory}
 """
 
     return output
@@ -396,6 +428,12 @@ def rstrip(lines: list[str]) -> str:
     return "\n".join([line.rstrip() for line in lines])
 
 
+def expand_user(path: str, docker_id: Optional[str]) -> str:
+    if not path or not path.startswith("~") or docker_id:
+        return path
+    return expanduser(path)
+
+
 def _incremental_text(text: str, last_pending_output: str) -> str:
     # text = render_terminal_output(text[-100_000:])
     text = text[-100_000:]
@@ -673,6 +711,9 @@ def truncate_if_over(content: str, max_tokens: Optional[int]) -> str:
 
 
 def read_image_from_shell(file_path: str) -> ImageData:
+    # Expand the path
+    file_path = expand_user(file_path, BASH_STATE.is_in_docker)
+
     if not os.path.isabs(file_path):
         file_path = os.path.join(BASH_STATE.cwd, file_path)
 
@@ -722,7 +763,7 @@ def write_file(
     if not os.path.isabs(writefile.file_path):
         return f"Failure: file_path should be absolute path, current working directory is {BASH_STATE.cwd}"
     else:
-        path_ = writefile.file_path
+        path_ = expand_user(writefile.file_path, BASH_STATE.is_in_docker)
 
     error_on_exist_ = error_on_exist and path_ not in BASH_STATE.whitelist_for_overwrite
     add_overwrite_warning = ""
@@ -828,126 +869,6 @@ Syntax errors:
     return "Success" + "".join(warnings)
 
 
-def find_least_edit_distance_substring(
-    orig_content_lines: list[str], find_lines: list[str]
-) -> tuple[list[str], str]:
-    # Prepare content lines, stripping whitespace and keeping track of original indices
-    content_lines = [line.strip() for line in orig_content_lines]
-    new_to_original_indices = {}
-    new_content_lines = []
-    for i, line in enumerate(content_lines):
-        if not line:
-            continue
-        new_content_lines.append(line)
-        new_to_original_indices[len(new_content_lines) - 1] = i
-    content_lines = new_content_lines
-
-    # Prepare find lines, removing empty lines
-    find_lines = [line.strip() for line in find_lines if line.strip()]
-
-    # Initialize variables for best match tracking
-    max_similarity = 0.0
-    min_edit_distance_lines = []
-    context_lines = []
-
-    # For each possible starting position in content
-    for i in range(max(1, len(content_lines) - len(find_lines) + 1)):
-        # Calculate similarity for the block starting at position i
-        block_similarity = 0.0
-        for j in range(len(find_lines)):
-            if (i + j) < len(content_lines):
-                # Use SequenceMatcher for more efficient similarity calculation
-                similarity = SequenceMatcher(
-                    None, content_lines[i + j], find_lines[j]
-                ).ratio()
-                block_similarity += similarity
-
-        # If this block is more similar than previous best
-        if block_similarity > max_similarity:
-            max_similarity = block_similarity
-            # Map back to original line indices
-            orig_start_index = new_to_original_indices[i]
-            orig_end_index = (
-                new_to_original_indices.get(
-                    i + len(find_lines) - 1, len(orig_content_lines) - 1
-                )
-                + 1
-            )
-            # Get the original lines
-            min_edit_distance_lines = orig_content_lines[
-                orig_start_index:orig_end_index
-            ]
-            # Get context (10 lines before and after)
-            context_lines = orig_content_lines[
-                max(0, orig_start_index - 10) : (orig_end_index + 10)
-            ]
-
-    return (
-        min_edit_distance_lines,
-        "\n".join(context_lines),
-    )
-
-
-def lines_replacer(
-    orig_content_lines: list[str], search_lines: list[str], replace_lines: list[str]
-) -> str:
-    # Validation for empty search
-    search_lines = list(filter(None, [x.strip() for x in search_lines]))
-
-    # Create mapping of non-empty lines to original indices
-    new_to_original_indices = []
-    new_content_lines = []
-    for i, line in enumerate(orig_content_lines):
-        stripped = line.strip()
-        if not stripped:
-            continue
-        new_content_lines.append(stripped)
-        new_to_original_indices.append(i)
-
-    if not new_content_lines and not search_lines:
-        return "\n".join(replace_lines)
-    elif not search_lines:
-        raise ValueError("Search block is empty")
-    elif not new_content_lines:
-        raise ValueError("File content is empty")
-
-    # Search for matching block
-    for i in range(len(new_content_lines) - len(search_lines) + 1):
-        if all(
-            new_content_lines[i + j] == search_lines[j]
-            for j in range(len(search_lines))
-        ):
-            start_idx = new_to_original_indices[i]
-            end_idx = new_to_original_indices[i + len(search_lines) - 1] + 1
-            return "\n".join(
-                orig_content_lines[:start_idx]
-                + replace_lines
-                + orig_content_lines[end_idx:]
-            )
-
-    raise ValueError("Search block not found in content")
-
-
-def edit_content(content: str, find_lines: str, replace_with_lines: str) -> str:
-    replace_with_lines_ = replace_with_lines.split("\n")
-    find_lines_ = find_lines.split("\n")
-    content_lines_ = content.split("\n")
-    try:
-        return lines_replacer(content_lines_, find_lines_, replace_with_lines_)
-    except ValueError:
-        pass
-
-    _, context_lines = find_least_edit_distance_substring(content_lines_, find_lines_)
-
-    raise Exception(
-        f"""Error: no match found for the provided search block.
-Requested search block: \n```\n{find_lines}\n```
-Possible relevant section in the file:\n---\n```\n{context_lines}\n```\n---\nFile not edited
-\nPlease retry with exact search. Re-read the file if unsure.
-"""
-    )
-
-
 def do_diff_edit(fedit: FileEdit, max_tokens: Optional[int]) -> str:
     try:
         return _do_diff_edit(fedit, max_tokens)
@@ -974,7 +895,7 @@ def _do_diff_edit(fedit: FileEdit, max_tokens: Optional[int]) -> str:
             f"Failure: file_path should be absolute path, current working directory is {BASH_STATE.cwd}"
         )
     else:
-        path_ = fedit.file_path
+        path_ = expand_user(fedit.file_path, BASH_STATE.is_in_docker)
 
     # The LLM is now aware that the file exists
     BASH_STATE.add_to_whitelist_for_overwrite(path_)
@@ -1002,46 +923,7 @@ def _do_diff_edit(fedit: FileEdit, max_tokens: Optional[int]) -> str:
     )
     lines = fedit.file_edit_using_search_replace_blocks.split("\n")
 
-
-        raise Exception(
-            "Error: first line should be `<<<<<< SEARCH` to start a search-replace block"
-        )
-
-    n_lines = len(lines)
-    i = 0
-    replacement_count = 0
-    while i < n_lines:
-        if re.match(r"^<<<<<<+\s*SEARCH\s*$", lines[i]):
-            search_block = []
-            i += 1
-            while i < n_lines and not re.match(r"^======*\s*$", lines[i]):
-                search_block.append(lines[i])
-                i += 1
-            i += 1
-            replace_block = []
-            while i < n_lines and not re.match(r"^>>>>>>+\s*REPLACE\s*$", lines[i]):
-                replace_block.append(lines[i])
-                i += 1
-            i += 1
-
-            for line in search_block:
-                console.log("> " + line)
-            console.log("=======")
-            for line in replace_block:
-                console.log("< " + line)
-            console.log("\n\n\n\n")
-            search_block_ = "\n".join(search_block)
-            replace_block_ = "\n".join(replace_block)
-
-            apply_diff_to = edit_content(apply_diff_to, search_block_, replace_block_)
-            replacement_count += 1
-        else:
-            i += 1
-
-    if replacement_count == 0:
-        raise Exception(
-            "Error: no valid search-replace blocks found, please check your syntax for FileEdit"
-        )
+    apply_diff_to, comments = search_replace_edit(lines, apply_diff_to, console.log)
 
     if not BASH_STATE.is_in_docker:
         with open(path_, "w") as f:
@@ -1070,9 +952,9 @@ def _do_diff_edit(fedit: FileEdit, max_tokens: Optional[int]) -> str:
             )
 
             console.print(f"W: Syntax errors encountered: {syntax_errors}")
-            return f"""
+            return f"""{comments}
 ---
-
+Tree-sitter reported syntax errors, please re-read the file and fix if there are any errors.
 Syntax errors:
 {syntax_errors}
 
@@ -1081,7 +963,7 @@ Syntax errors:
     except Exception:
         pass
 
-    return
+    return comments
 
 
 class DoneFlag(BaseModel):
@@ -1123,6 +1005,7 @@ TOOLS = (
     | Keyboard
     | ScreenShot
     | GetScreenInfo
+    | ContextSave
 )
 
 
@@ -1164,6 +1047,8 @@ def which_tool_name(name: str) -> Type[TOOLS]:
         return ScreenShot
     elif name == "GetScreenInfo":
        return GetScreenInfo
+    elif name == "ContextSave":
+        return ContextSave
     else:
         raise ValueError(f"Unknown tool name: {name}")
 
@@ -1216,7 +1101,12 @@ def get_tool_output(
     elif isinstance(arg, Initialize):
         console.print("Calling initial info tool")
         output = (
-            initialize(
+            initialize(
+                arg.any_workspace_path,
+                arg.initial_files_to_read,
+                arg.task_id_to_resume,
+                max_tokens,
+            ),
             0.0,
         )
     elif isinstance(arg, (Mouse, Keyboard, ScreenShot, GetScreenInfo)):
@@ -1254,6 +1144,26 @@ def get_tool_output(
         )
         BASH_STATE.set_in_docker(arg.docker_image_id)
         return outputs, outputs_cost[1]
+    elif isinstance(arg, ContextSave):
+        console.print("Calling task memory tool")
+        assert not BASH_STATE.is_in_docker, "KT not supported in docker"
+        relevant_files = []
+        warnings = ""
+        for fglob in arg.relevant_file_globs:
+            fglob = expand_user(fglob, None)
+            if not os.path.isabs(fglob) and arg.project_root_path:
+                fglob = os.path.join(arg.project_root_path, fglob)
+            globs = glob.glob(fglob)
+            relevant_files.extend(globs[:1000])
+            if not globs:
+                warnings += f"Warning: No files found for the glob: {fglob}\n"
+        relevant_files_data = read_files(relevant_files[:10_000], None)
+        output_ = save_memory(arg, relevant_files_data)
+        if not relevant_files and arg.relevant_file_globs:
+            output_ = f'Error: No files found for the given globs. Context file successfully saved at "{output_}", but please fix the error.'
+        elif warnings:
+            output_ = warnings + "\nContext file successfully saved at " + output_
+        output = output_, 0.0
     else:
         raise ValueError(f"Unknown tool: {arg}")
     if isinstance(output[0], str):
@@ -1282,6 +1192,7 @@ class Mdata(BaseModel):
         | str
         | ReadFiles
         | Initialize
+        | ContextSave
     )
 
 
@@ -1381,6 +1292,9 @@ def read_files(file_paths: list[str], max_tokens: Optional[int]) -> str:
 def read_file(file_path: str, max_tokens: Optional[int]) -> tuple[str, bool, int]:
     console.print(f"Reading file: {file_path}")
 
+    # Expand the path before checking if it's absolute
+    file_path = expand_user(file_path, BASH_STATE.is_in_docker)
+
     if not os.path.isabs(file_path):
         raise ValueError(
             f"Failure: file_path should be absolute path, current working directory is {BASH_STATE.cwd}"
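Note on the path handling above: 2.7.x routes user-supplied paths through the new `expand_user` helper before the absolute-path checks, so `~`-prefixed paths work on the host while paths destined for a docker container are left untouched. A minimal, self-contained sketch of that behavior (the helper body is copied from the hunk above; the docker id is simplified here to a plain optional string):

```python
from os.path import expanduser
from typing import Optional


def expand_user(path: str, docker_id: Optional[str]) -> str:
    # Same logic as the helper added in tools.py: only expand "~" paths,
    # and never expand when a docker container id is set, since "~" must
    # then resolve inside the container rather than on the host.
    if not path or not path.startswith("~") or docker_id:
        return path
    return expanduser(path)


print(expand_user("~/repo/main.py", None))       # expanded on the host
print(expand_user("~/repo/main.py", "abc123"))   # left as-is when in docker
print(expand_user("/abs/main.py", None))         # absolute paths unchanged
```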
wcgw/relay/serve.py
CHANGED
@@ -1,29 +1,28 @@
 import asyncio
-import base64
-from importlib import metadata
-import semantic_version  # type: ignore[import-untyped]
 import threading
 import time
-from
+from importlib import metadata
+from typing import Any, Callable, Coroutine, DefaultDict, Optional
 from uuid import UUID
+
 import fastapi
-
-from pydantic import BaseModel
+import semantic_version  # type: ignore[import-untyped]
 import uvicorn
-from fastapi.staticfiles import StaticFiles
-
 from dotenv import load_dotenv
+from fastapi import WebSocket, WebSocketDisconnect
+from fastapi.staticfiles import StaticFiles
+from pydantic import BaseModel
 
 from ..types_ import (
     BashCommand,
     BashInteraction,
-
-    FileEditFindReplace,
+    ContextSave,
     FileEdit,
+    FileEditFindReplace,
     Initialize,
     ReadFiles,
     ResetShell,
-
+    WriteIfEmpty,
 )
 
 
@@ -37,6 +36,7 @@ class Mdata(BaseModel):
         | FileEdit
         | ReadFiles
         | Initialize
+        | ContextSave
         | str
     )
     user_id: UUID
@@ -51,7 +51,7 @@ gpts: dict[UUID, Callable[[str], None]] = {}
 images: DefaultDict[UUID, dict[str, dict[str, Any]]] = DefaultDict(dict)
 
 
-CLIENT_VERSION_MINIMUM = "
+CLIENT_VERSION_MINIMUM = "2.7.0"
 
 
 @app.websocket("/v1/register/{uuid}")
@@ -317,6 +317,35 @@ async def initialize(initialize_data: InitializeWithUUID) -> str:
     raise fastapi.HTTPException(status_code=500, detail="Timeout error")
 
 
+class ContextSaveWithUUID(ContextSave):
+    user_id: UUID
+
+
+@app.post("/v1/context_save")
+async def context_save(context_save_data: ContextSaveWithUUID) -> str:
+    user_id = context_save_data.user_id
+    if user_id not in clients:
+        return "Failure: id not found, ask the user to check it."
+
+    results: Optional[str] = None
+
+    def put_results(result: str) -> None:
+        nonlocal results
+        results = result
+
+    gpts[user_id] = put_results
+
+    await clients[user_id](Mdata(data=context_save_data, user_id=user_id))
+
+    start_time = time.time()
+    while time.time() - start_time < 30:
+        if results is not None:
+            return results
+        await asyncio.sleep(0.1)
+
+    raise fastapi.HTTPException(status_code=500, detail="Timeout error")
+
+
 app.mount("/static", StaticFiles(directory="static"), name="static")
 
 
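The relay gains a `/v1/context_save` route mirroring the existing `initialize` flow: it registers a result callback, forwards the payload to the registered client, and polls for up to 30 seconds. A hedged sketch of calling it over HTTP; the base URL and UUID are placeholders, and the field names simply mirror `ContextSaveWithUUID` (`ContextSave` plus `user_id`) from the hunk above:

```python
import requests

RELAY_URL = "http://localhost:8000"  # assumption: wherever the relay is hosted

payload = {
    "user_id": "123e4567-e89b-12d3-a456-426614174000",  # placeholder UUID
    "id": "auth-refactor",
    "project_root_path": "/home/me/myrepo",
    "description": "Refactoring the auth module; tests still failing",
    "relevant_file_globs": ["src/auth/**/*.py", "tests/test_auth*.py"],
}

# The route returns a plain string; the server side gives up after ~30 seconds,
# so allow slightly more on the client.
resp = requests.post(f"{RELAY_URL}/v1/context_save", json=payload, timeout=35)
print(resp.status_code, resp.text)
```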
wcgw/types_.py
CHANGED
@@ -59,6 +59,7 @@ class FileEdit(BaseModel):
 class Initialize(BaseModel):
     any_workspace_path: str
     initial_files_to_read: list[str]
+    task_id_to_resume: str
 
 
 class GetScreenInfo(BaseModel):
@@ -98,3 +99,10 @@ class Mouse(BaseModel):
 class Keyboard(BaseModel):
     action: Literal["key", "type"]
     text: str
+
+
+class ContextSave(BaseModel):
+    id: str
+    project_root_path: str
+    description: str
+    relevant_file_globs: list[str]
{wcgw-2.6.3.dist-info → wcgw-2.7.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: wcgw
-Version: 2.6.3
+Version: 2.7.1
 Summary: Shell and coding agent on claude and chatgpt
 Project-URL: Homepage, https://github.com/rusiaaman/wcgw
 Author-email: Aman Rusia <gapypi@arcfu.com>
@@ -33,7 +33,7 @@ Description-Content-Type: text/markdown
 - Chatgpt - Allows custom gpt to talk to your shell via a relay server. (linux or mac)
 
 
-⚠️ Warning: do not
+⚠️ Warning: do not allow BashCommand tool without reviewing the command, it may result in data loss.
 
 [](https://github.com/rusiaaman/wcgw/actions/workflows/python-tests.yml)
 [](https://github.com/rusiaaman/wcgw/actions/workflows/python-types.yml)
@@ -55,7 +55,18 @@ Description-Content-Type: text/markdown
 - ⚡ **Large file edit**: Supports large file incremental edits to avoid token limit issues. Faster than full file write.
 - ⚡ **Syntax checking on edits**: Reports feedback to the LLM if its edits have any syntax errors, so that it can redo it.
 - ⚡ **Interactive Command Handling**: Supports interactive commands using arrow keys, interrupt, and ansi escape sequences.
-- ⚡ **
+- ⚡ **File protections**:
+  - The AI needs to read a file at least once before it's allowed to edit or rewrite it. This avoids accidental overwrites.
+  - Avoids context filling up while reading very large files. Files get chunked based on token length.
+  - On initialisation the provided workspace's directory structure is returned after selecting important files (based on .gitignore as well as a statistical approach)
+  - File edit based on search-replace tries to find correct search block if it has multiple matches based on previous search blocks. Fails otherwise (for correctness).
+  - File edit has spacing tolerant matching, with warning on issues like indentation mismatch. If there's no match, the closest match is returned to the AI to fix its mistakes.
+  - Using Aider-like search and replace, which has better performance than tool call based search and replace.
+- ⚡ **Shell optimisations**:
+  - Only one command is allowed to be run at a time, simplifying management and avoiding rogue processes. There's only single shell instance at any point of time.
+  - Current working directory is always returned after any shell command to prevent AI from getting lost.
+  - Command polling exits after a quick timeout to avoid slow feedback. However, status checking has wait tolerance based on fresh output streaming from a command. Both of these approach combined provides a good shell interaction experience.
+
 
 ## Top use cases examples
 
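The "Aider-like search and replace" bullets above refer to the block format carried by `FileEdit.file_edit_using_search_replace_blocks`. The markers below match the regexes of the parser removed from tools.py in this release (`<<<<<<+ SEARCH`, `======`, `>>>>>>+ REPLACE`); the replacement parser lives in `file_ops/search_replace.py`, which is not shown in this diff, so treat the exact format it accepts as an assumption:

```python
# Hypothetical FileEdit payload; file_path is a placeholder.
edit_blocks = """<<<<<<< SEARCH
def greet(name):
    print("hello " + name)
=======
def greet(name: str) -> None:
    print(f"hello {name}")
>>>>>>> REPLACE"""

payload = {
    "file_path": "/home/me/myrepo/src/greet.py",
    "file_edit_using_search_replace_blocks": edit_blocks,
}
print(payload["file_edit_using_search_replace_blocks"])
```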
{wcgw-2.6.3.dist-info → wcgw-2.7.1.dist-info}/RECORD
CHANGED
@@ -1,25 +1,28 @@
 wcgw/__init__.py,sha256=9K2QW7QuSLhMTVbKbBYd9UUp-ZyrfBrxcjuD_xk458k,118
-wcgw/types_.py,sha256=
+wcgw/types_.py,sha256=6gqdwC0F_rAQ80uPQpTWnw7uUByAD9qPa6-hKFgxqUU,1946
 wcgw/client/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 wcgw/client/__main__.py,sha256=wcCrL4PjG51r5wVKqJhcoJPTLfHW0wNbD31DrUN0MWI,28
-wcgw/client/anthropic_client.py,sha256=
+wcgw/client/anthropic_client.py,sha256=p-QsqjTl5LJxCFL5GfPNlQCRa4Bh4XM6MWfkaYJj4qc,21439
 wcgw/client/cli.py,sha256=-z0kpDAW3mzfQrQeZfaVJhBCAQY3HXnt9GdgQ8s-u0Y,1003
 wcgw/client/common.py,sha256=OCH7Tx64jojz3M3iONUrGMadE07W21DiZs5sOxWX1Qc,1456
 wcgw/client/computer_use.py,sha256=35NKAlMrxwD0TBlMMRnbCwz4g8TBRGOlcy-cmS-yJ_A,15247
-wcgw/client/diff-instructions.txt,sha256=
-wcgw/client/
+wcgw/client/diff-instructions.txt,sha256=tmJ9Fu9XdO_72lYXQQNY9RZyx91bjxrXJf9d_KBz57k,1611
+wcgw/client/memory.py,sha256=x7AsZo2GwYyEmnxtTvmQJBlUxD_26LuENQbCbNhTGyY,2272
+wcgw/client/openai_client.py,sha256=UCiamF27iVhYh5fI5Kpa1IIMMXI6R5uPtRDjdKyHhOQ,18340
 wcgw/client/openai_utils.py,sha256=KfMB1-p2zDiA7pPWwAVarochf7-qeL1UMgtlDV9DtKA,2662
 wcgw/client/sys_utils.py,sha256=GajPntKhaTUMn6EOmopENWZNR2G_BJyuVbuot0x6veI,1376
-wcgw/client/tools.py,sha256=
+wcgw/client/tools.py,sha256=Cs3VnwlhYpYvLrq5Q0bYAvgRObne-UU3P6VcfgqK4_0,44914
+wcgw/client/file_ops/diff_edit.py,sha256=o0ucArVwn5p6QTDgYsjLfMy4TJXxUG3qcppFBNF3bbQ,16751
+wcgw/client/file_ops/search_replace.py,sha256=89ieDC9fTsIKPDx7CJwnwpX32dRdSlMKoBtKVXc7VWI,3971
 wcgw/client/mcp_server/Readme.md,sha256=I8N4dHkTUVGNQ63BQkBMBhCCBTgqGOSF_pUR6iOEiUk,2495
 wcgw/client/mcp_server/__init__.py,sha256=hyPPwO9cabAJsOMWhKyat9yl7OlSmIobaoAZKHu3DMc,381
-wcgw/client/mcp_server/server.py,sha256=
+wcgw/client/mcp_server/server.py,sha256=UWUHBlu7UllR4q3xgyHQLeoUSLKmtkGc9nNrZaoKYe4,14905
 wcgw/client/repo_ops/display_tree.py,sha256=5FD4hfMkM2cIZnXlu7WfJswJLthj0SkuHlkGH6dpWQU,4632
 wcgw/client/repo_ops/path_prob.py,sha256=SWf0CDn37rtlsYRQ51ufSxay-heaQoVIhr1alB9tZ4M,2144
 wcgw/client/repo_ops/paths_model.vocab,sha256=M1pXycYDQehMXtpp-qAgU7rtzeBbCOiJo4qcYFY0kqk,315087
 wcgw/client/repo_ops/paths_tokens.model,sha256=jiwwE4ae8ADKuTZISutXuM5Wfyc_FBmN5rxTjoNnCos,1569052
 wcgw/client/repo_ops/repo_context.py,sha256=5NqRxBY0K-SBFXJ0Ybt7llzYOBD8pRkTpruMMJHWxv4,4336
-wcgw/relay/serve.py,sha256=
+wcgw/relay/serve.py,sha256=Z5EwtaCAtKFBSnUw4mPYw0sze3Coc4Fa8gObRRG_bT0,9525
 wcgw/relay/static/privacy.txt,sha256=s9qBdbx2SexCpC_z33sg16TptmAwDEehMCLz4L50JLc,529
 mcp_wcgw/__init__.py,sha256=fKCgOdN7cn7gR3YGFaGyV5Goe8A2sEyllLcsRkN0i-g,2601
 mcp_wcgw/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -43,8 +46,8 @@ mcp_wcgw/shared/memory.py,sha256=dBsOghxHz8-tycdSVo9kSujbsC8xb_tYsGmuJobuZnw,281
 mcp_wcgw/shared/progress.py,sha256=ymxOsb8XO5Mhlop7fRfdbmvPodANj7oq6O4dD0iUcnw,1048
 mcp_wcgw/shared/session.py,sha256=e44a0LQOW8gwdLs9_DE9oDsxqW2U8mXG3d5KT95bn5o,10393
 mcp_wcgw/shared/version.py,sha256=d2LZii-mgsPIxpshjkXnOTUmk98i0DT4ff8VpA_kAvE,111
-wcgw-2.
-wcgw-2.
-wcgw-2.
-wcgw-2.
-wcgw-2.
+wcgw-2.7.1.dist-info/METADATA,sha256=XoEYy3PMfwJzhaSz8TcR2-yI_R7FgM141XVzaBVsHPU,8351
+wcgw-2.7.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+wcgw-2.7.1.dist-info/entry_points.txt,sha256=eKo1omwbAggWlQ0l7GKoR7uV1-j16nk9tK0BhC2Oz_E,120
+wcgw-2.7.1.dist-info/licenses/LICENSE,sha256=BvY8xqjOfc3X2qZpGpX3MZEmF-4Dp0LqgKBbT6L_8oI,11142
+wcgw-2.7.1.dist-info/RECORD,,
File without changes
|
|
File without changes
|
|
File without changes
|