wcgw 2.6.2__py3-none-any.whl → 2.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of wcgw might be problematic.

wcgw/client/tools.py CHANGED
@@ -9,7 +9,7 @@ import shlex
 import time
 import traceback
 import uuid
-from difflib import SequenceMatcher
+from os.path import expanduser
 from pathlib import Path
 from tempfile import NamedTemporaryFile, TemporaryDirectory
 from typing import (
@@ -43,6 +43,7 @@ from ..types_ import (
     GetScreenInfo,
     Initialize,
     Keyboard,
+    KnowledgeTransfer,
     Mouse,
     ReadFiles,
     ReadImage,
@@ -51,6 +52,8 @@ from ..types_ import (
     WriteIfEmpty,
 )
 from .computer_use import run_computer_tool
+from .file_ops.search_replace import search_replace_edit
+from .memory import format_memory, load_memory, save_memory
 from .repo_ops.repo_context import get_repo_context
 from .sys_utils import command_run
 
@@ -290,12 +293,33 @@ BASH_STATE = BashState()
 
 
 def initialize(
-    any_workspace_path: str, read_files_: list[str], max_tokens: Optional[int]
+    any_workspace_path: str,
+    read_files_: list[str],
+    task_id_to_resume: str,
+    max_tokens: Optional[int],
 ) -> str:
     reset_shell()
 
+    # Expand the workspace path
+    any_workspace_path = expand_user(any_workspace_path, BASH_STATE.is_in_docker)
     repo_context = ""
 
+    memory = ""
+    if task_id_to_resume:
+        try:
+            task_mem = load_memory(task_id_to_resume)
+            mem_files = task_mem.relevant_file_paths
+            mem_files_read = read_files(mem_files, max_tokens)
+            memory = "Following is the retrieved task:\n" + format_memory(
+                task_mem, mem_files_read
+            )
+            if (
+                not any_workspace_path or not os.path.exists(any_workspace_path)
+            ) and os.path.exists(task_mem.project_root_path):
+                any_workspace_path = task_mem.project_root_path
+        except Exception:
+            memory = f'Error: Unable to load task with ID "{task_id_to_resume}" '
+
     if any_workspace_path:
         if os.path.exists(any_workspace_path):
            repo_context, folder_to_start = get_repo_context(any_workspace_path, 200)
@@ -306,7 +330,9 @@ def initialize(
 
             repo_context = f"---\n# Workspace structure\n{repo_context}\n---\n"
         else:
-            return f"\nInfo: Workspace path {any_workspace_path} does not exist\n"
+            repo_context = (
+                f"\nInfo: Workspace path {any_workspace_path} does not exist\n"
+            )
 
     initial_files_context = ""
     if read_files_:
@@ -325,6 +351,10 @@ Current working directory: {BASH_STATE.cwd}
 {repo_context}
 
 {initial_files_context}
+
+---
+
+{memory}
 """
 
     return output
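
Aside (not part of the diff): a minimal sketch of how the reworked initialize() signature above is called with the new task_id_to_resume argument. The task id is hypothetical; per the added code, an empty workspace path falls back to the saved task's project_root_path when that directory still exists, and the formatted memory is appended after a "---" separator.

    from wcgw.client.tools import initialize

    # Resume a previously saved task; its relevant files are re-read and the
    # retrieved task description is appended to the returned context.
    context = initialize(
        any_workspace_path="",       # falls back to the saved project_root_path if it exists
        read_files_=[],
        task_id_to_resume="t-1234",  # hypothetical id saved earlier via KnowledgeTransfer
        max_tokens=None,
    )
    print(context)
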
@@ -396,6 +426,12 @@ def rstrip(lines: list[str]) -> str:
     return "\n".join([line.rstrip() for line in lines])
 
 
+def expand_user(path: str, docker_id: Optional[str]) -> str:
+    if not path or not path.startswith("~") or docker_id:
+        return path
+    return expanduser(path)
+
+
 def _incremental_text(text: str, last_pending_output: str) -> str:
     # text = render_terminal_output(text[-100_000:])
     text = text[-100_000:]
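
Aside (not part of the diff): the expand_user helper added above only expands a leading "~" for local paths; when a docker container id is set, the path is passed through untouched, presumably so it gets resolved inside the container rather than on the host. A small illustration, repeating the helper verbatim so it runs standalone (the container id is made up):

    from os.path import expanduser
    from typing import Optional

    def expand_user(path: str, docker_id: Optional[str]) -> str:
        # Same logic as the helper added in the diff above.
        if not path or not path.startswith("~") or docker_id:
            return path
        return expanduser(path)

    assert expand_user("~/project/main.py", None) == expanduser("~/project/main.py")
    assert expand_user("~/project/main.py", "c0ffee") == "~/project/main.py"  # "c0ffee" is a made-up container id
    assert expand_user("/abs/path", None) == "/abs/path"
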
@@ -673,6 +709,9 @@ def truncate_if_over(content: str, max_tokens: Optional[int]) -> str:
 
 
 def read_image_from_shell(file_path: str) -> ImageData:
+    # Expand the path
+    file_path = expand_user(file_path, BASH_STATE.is_in_docker)
+
     if not os.path.isabs(file_path):
         file_path = os.path.join(BASH_STATE.cwd, file_path)
 
@@ -722,7 +761,7 @@ def write_file(
     if not os.path.isabs(writefile.file_path):
         return f"Failure: file_path should be absolute path, current working directory is {BASH_STATE.cwd}"
     else:
-        path_ = writefile.file_path
+        path_ = expand_user(writefile.file_path, BASH_STATE.is_in_docker)
 
     error_on_exist_ = error_on_exist and path_ not in BASH_STATE.whitelist_for_overwrite
     add_overwrite_warning = ""
@@ -828,126 +867,6 @@ Syntax errors:
     return "Success" + "".join(warnings)
 
 
-def find_least_edit_distance_substring(
-    orig_content_lines: list[str], find_lines: list[str]
-) -> tuple[list[str], str]:
-    # Prepare content lines, stripping whitespace and keeping track of original indices
-    content_lines = [line.strip() for line in orig_content_lines]
-    new_to_original_indices = {}
-    new_content_lines = []
-    for i, line in enumerate(content_lines):
-        if not line:
-            continue
-        new_content_lines.append(line)
-        new_to_original_indices[len(new_content_lines) - 1] = i
-    content_lines = new_content_lines
-
-    # Prepare find lines, removing empty lines
-    find_lines = [line.strip() for line in find_lines if line.strip()]
-
-    # Initialize variables for best match tracking
-    max_similarity = 0.0
-    min_edit_distance_lines = []
-    context_lines = []
-
-    # For each possible starting position in content
-    for i in range(max(1, len(content_lines) - len(find_lines) + 1)):
-        # Calculate similarity for the block starting at position i
-        block_similarity = 0.0
-        for j in range(len(find_lines)):
-            if (i + j) < len(content_lines):
-                # Use SequenceMatcher for more efficient similarity calculation
-                similarity = SequenceMatcher(
-                    None, content_lines[i + j], find_lines[j]
-                ).ratio()
-                block_similarity += similarity
-
-        # If this block is more similar than previous best
-        if block_similarity > max_similarity:
-            max_similarity = block_similarity
-            # Map back to original line indices
-            orig_start_index = new_to_original_indices[i]
-            orig_end_index = (
-                new_to_original_indices.get(
-                    i + len(find_lines) - 1, len(orig_content_lines) - 1
-                )
-                + 1
-            )
-            # Get the original lines
-            min_edit_distance_lines = orig_content_lines[
-                orig_start_index:orig_end_index
-            ]
-            # Get context (10 lines before and after)
-            context_lines = orig_content_lines[
-                max(0, orig_start_index - 10) : (orig_end_index + 10)
-            ]
-
-    return (
-        min_edit_distance_lines,
-        "\n".join(context_lines),
-    )
-
-
-def lines_replacer(
-    orig_content_lines: list[str], search_lines: list[str], replace_lines: list[str]
-) -> str:
-    # Validation for empty search
-    search_lines = list(filter(None, [x.strip() for x in search_lines]))
-
-    # Create mapping of non-empty lines to original indices
-    new_to_original_indices = []
-    new_content_lines = []
-    for i, line in enumerate(orig_content_lines):
-        stripped = line.strip()
-        if not stripped:
-            continue
-        new_content_lines.append(stripped)
-        new_to_original_indices.append(i)
-
-    if not new_content_lines and not search_lines:
-        return "\n".join(replace_lines)
-    elif not search_lines:
-        raise ValueError("Search block is empty")
-    elif not new_content_lines:
-        raise ValueError("File content is empty")
-
-    # Search for matching block
-    for i in range(len(new_content_lines) - len(search_lines) + 1):
-        if all(
-            new_content_lines[i + j] == search_lines[j]
-            for j in range(len(search_lines))
-        ):
-            start_idx = new_to_original_indices[i]
-            end_idx = new_to_original_indices[i + len(search_lines) - 1] + 1
-            return "\n".join(
-                orig_content_lines[:start_idx]
-                + replace_lines
-                + orig_content_lines[end_idx:]
-            )
-
-    raise ValueError("Search block not found in content")
-
-
-def edit_content(content: str, find_lines: str, replace_with_lines: str) -> str:
-    replace_with_lines_ = replace_with_lines.split("\n")
-    find_lines_ = find_lines.split("\n")
-    content_lines_ = content.split("\n")
-    try:
-        return lines_replacer(content_lines_, find_lines_, replace_with_lines_)
-    except ValueError:
-        pass
-
-    _, context_lines = find_least_edit_distance_substring(content_lines_, find_lines_)
-
-    raise Exception(
-        f"""Error: no match found for the provided search block.
-Requested search block: \n```\n{find_lines}\n```
-Possible relevant section in the file:\n---\n```\n{context_lines}\n```\n---\nFile not edited
-\nPlease retry with exact search. Re-read the file if unsure.
-"""
-    )
-
-
 def do_diff_edit(fedit: FileEdit, max_tokens: Optional[int]) -> str:
     try:
         return _do_diff_edit(fedit, max_tokens)
@@ -974,7 +893,7 @@ def _do_diff_edit(fedit: FileEdit, max_tokens: Optional[int]) -> str:
             f"Failure: file_path should be absolute path, current working directory is {BASH_STATE.cwd}"
         )
     else:
-        path_ = fedit.file_path
+        path_ = expand_user(fedit.file_path, BASH_STATE.is_in_docker)
 
     # The LLM is now aware that the file exists
     BASH_STATE.add_to_whitelist_for_overwrite(path_)
@@ -1002,46 +921,7 @@ def _do_diff_edit(fedit: FileEdit, max_tokens: Optional[int]) -> str:
         )
         lines = fedit.file_edit_using_search_replace_blocks.split("\n")
 
-        if not lines or not re.match(r"^<<<<<<+\s*SEARCH\s*$", lines[0]):
-            raise Exception(
-                "Error: first line should be `<<<<<< SEARCH` to start a search-replace block"
-            )
-
-        n_lines = len(lines)
-        i = 0
-        replacement_count = 0
-        while i < n_lines:
-            if re.match(r"^<<<<<<+\s*SEARCH\s*$", lines[i]):
-                search_block = []
-                i += 1
-                while i < n_lines and not re.match(r"^======*\s*$", lines[i]):
-                    search_block.append(lines[i])
-                    i += 1
-                i += 1
-                replace_block = []
-                while i < n_lines and not re.match(r"^>>>>>>+\s*REPLACE\s*$", lines[i]):
-                    replace_block.append(lines[i])
-                    i += 1
-                i += 1
-
-                for line in search_block:
-                    console.log("> " + line)
-                console.log("=======")
-                for line in replace_block:
-                    console.log("< " + line)
-                console.log("\n\n\n\n")
-                search_block_ = "\n".join(search_block)
-                replace_block_ = "\n".join(replace_block)
-
-                apply_diff_to = edit_content(apply_diff_to, search_block_, replace_block_)
-                replacement_count += 1
-            else:
-                i += 1
-
-        if replacement_count == 0:
-            raise Exception(
-                "Error: no valid search-replace blocks found, please check your syntax for FileEdit"
-            )
+        apply_diff_to, comments = search_replace_edit(lines, apply_diff_to, console.log)
 
         if not BASH_STATE.is_in_docker:
             with open(path_, "w") as f:
@@ -1070,9 +950,9 @@ def _do_diff_edit(fedit: FileEdit, max_tokens: Optional[int]) -> str:
             )
 
             console.print(f"W: Syntax errors encountered: {syntax_errors}")
-            return f"""Wrote file succesfully.
+            return f"""{comments}
 ---
-However, tree-sitter reported syntax errors, please re-read the file and fix if there are any errors.
+Tree-sitter reported syntax errors, please re-read the file and fix if there are any errors.
 Syntax errors:
 {syntax_errors}
 
@@ -1081,7 +961,7 @@ Syntax errors:
     except Exception:
         pass
 
-    return "Success"
+    return comments
 
 
 class DoneFlag(BaseModel):
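
Aside (not part of the diff): _do_diff_edit now delegates block parsing to search_replace_edit and returns its comments string instead of a bare "Success". A minimal sketch of calling the new helper directly, assuming it accepts the same <<<<<<< SEARCH / ======= / >>>>>>> REPLACE markers that the removed inline parser matched; the file content and blocks below are made up:

    from wcgw.client.file_ops.search_replace import search_replace_edit

    original = "def greet():\n    print('hi')\n"
    blocks = [
        "<<<<<<< SEARCH",
        "    print('hi')",
        "=======",
        "    print('hello')",
        ">>>>>>> REPLACE",
    ]

    edited, comments = search_replace_edit(blocks, original, print)
    # `edited` holds the updated file text; `comments` is the status string
    # that _do_diff_edit now returns to the caller.
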
@@ -1123,6 +1003,7 @@ TOOLS = (
     | Keyboard
     | ScreenShot
     | GetScreenInfo
+    | KnowledgeTransfer
 )
 
 
@@ -1164,6 +1045,8 @@ def which_tool_name(name: str) -> Type[TOOLS]:
         return ScreenShot
     elif name == "GetScreenInfo":
         return GetScreenInfo
+    elif name == "KnowledgeTransfer":
+        return KnowledgeTransfer
     else:
         raise ValueError(f"Unknown tool name: {name}")
 
@@ -1216,7 +1099,12 @@ def get_tool_output(
     elif isinstance(arg, Initialize):
         console.print("Calling initial info tool")
         output = (
-            initialize(arg.any_workspace_path, arg.initial_files_to_read, max_tokens),
+            initialize(
+                arg.any_workspace_path,
+                arg.initial_files_to_read,
+                arg.task_id_to_resume,
+                max_tokens,
+            ),
             0.0,
         )
     elif isinstance(arg, (Mouse, Keyboard, ScreenShot, GetScreenInfo)):
@@ -1242,9 +1130,7 @@ def get_tool_output(
             # At this point we should go into the docker env
             res, _ = execute_bash(
                 enc,
-                BashInteraction(
-                    send_text=f"export PS1={PROMPT}", type="BashInteraction"
-                ),
+                BashInteraction(send_text=f"export PS1={PROMPT}"),
                 None,
                 0.2,
             )
@@ -1256,6 +1142,20 @@ def get_tool_output(
         )
         BASH_STATE.set_in_docker(arg.docker_image_id)
         return outputs, outputs_cost[1]
+    elif isinstance(arg, KnowledgeTransfer):
+        console.print("Calling task memory tool")
+        relevant_files = arg.relevant_file_paths
+        for i, fpath in enumerate(relevant_files):
+            if not os.path.isabs(fpath):
+                relpath = os.path.join(arg.project_root_path, fpath)
+                if os.path.exists(relpath):
+                    relevant_files[i] = relpath
+                else:
+                    raise Exception(f"The file path {fpath} does not exist")
+            elif not os.path.exists(fpath):
+                raise Exception(f"The file path {fpath} does not exist")
+        relevant_files_data = read_files(relevant_files, None)
+        output = save_memory(arg, relevant_files_data), 0.0
     else:
         raise ValueError(f"Unknown tool: {arg}")
     if isinstance(output[0], str):
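
Aside (not part of the diff): a rough sketch of what the new KnowledgeTransfer branch above boils down to, using the helpers it calls. Every field value and path below is made up, and save_memory/load_memory come from the new wcgw/client/memory.py module listed further down in RECORD:

    from wcgw.client.memory import load_memory, save_memory
    from wcgw.types_ import KnowledgeTransfer

    kt = KnowledgeTransfer(
        id="t-1234",                                 # hypothetical task id
        project_root_path="/home/user/project",      # made-up paths throughout
        objective="Fix the failing unit tests",
        all_user_instructions="Run pytest before committing.",
        current_status_of_the_task="Two tests still fail with 401 errors.",
        all_issues_snippets="tests/test_api.py::test_auth AssertionError ...",
        relevant_file_paths=["/home/user/project/tests/test_api.py"],
        build_and_development_instructions="pip install -e . && pytest",
    )

    # The tool branch resolves relative entries of relevant_file_paths against
    # project_root_path, reads them, and stores everything under the task id.
    confirmation = save_memory(kt, "<file contents collected via read_files>")

    # Later, Initialize(task_id_to_resume="t-1234") triggers the reverse lookup:
    saved = load_memory("t-1234")
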
@@ -1284,6 +1184,7 @@ class Mdata(BaseModel):
         | str
         | ReadFiles
         | Initialize
+        | KnowledgeTransfer
     )
 
 
@@ -1383,6 +1284,9 @@ def read_files(file_paths: list[str], max_tokens: Optional[int]) -> str:
 def read_file(file_path: str, max_tokens: Optional[int]) -> tuple[str, bool, int]:
     console.print(f"Reading file: {file_path}")
 
+    # Expand the path before checking if it's absolute
+    file_path = expand_user(file_path, BASH_STATE.is_in_docker)
+
     if not os.path.isabs(file_path):
         raise ValueError(
             f"Failure: file_path should be absolute path, current working directory is {BASH_STATE.cwd}"
wcgw/relay/serve.py CHANGED
@@ -1,29 +1,28 @@
 import asyncio
-import base64
-from importlib import metadata
-import semantic_version  # type: ignore[import-untyped]
 import threading
 import time
-from typing import Any, Callable, Coroutine, DefaultDict, Literal, Optional, Sequence
+from importlib import metadata
+from typing import Any, Callable, Coroutine, DefaultDict, Optional
 from uuid import UUID
+
 import fastapi
-from fastapi import Response, WebSocket, WebSocketDisconnect
-from pydantic import BaseModel
+import semantic_version  # type: ignore[import-untyped]
 import uvicorn
-from fastapi.staticfiles import StaticFiles
-
 from dotenv import load_dotenv
+from fastapi import WebSocket, WebSocketDisconnect
+from fastapi.staticfiles import StaticFiles
+from pydantic import BaseModel
 
 from ..types_ import (
     BashCommand,
     BashInteraction,
-    WriteIfEmpty,
-    FileEditFindReplace,
     FileEdit,
+    FileEditFindReplace,
     Initialize,
+    KnowledgeTransfer,
     ReadFiles,
     ResetShell,
-    Specials,
+    WriteIfEmpty,
 )
 
 
@@ -37,6 +36,7 @@ class Mdata(BaseModel):
         | FileEdit
         | ReadFiles
         | Initialize
+        | KnowledgeTransfer
         | str
     )
     user_id: UUID
@@ -51,7 +51,7 @@ gpts: dict[UUID, Callable[[str], None]] = {}
 images: DefaultDict[UUID, dict[str, dict[str, Any]]] = DefaultDict(dict)
 
 
-CLIENT_VERSION_MINIMUM = "1.3.0"
+CLIENT_VERSION_MINIMUM = "2.7.0"
 
 
 @app.websocket("/v1/register/{uuid}")
@@ -317,6 +317,35 @@ async def initialize(initialize_data: InitializeWithUUID) -> str:
     raise fastapi.HTTPException(status_code=500, detail="Timeout error")
 
 
+class KTWithUUID(KnowledgeTransfer):
+    user_id: UUID
+
+
+@app.post("/v1/knowledge_transfer")
+async def knowledge_transfer(knowledge_transfer_data: KTWithUUID) -> str:
+    user_id = knowledge_transfer_data.user_id
+    if user_id not in clients:
+        return "Failure: id not found, ask the user to check it."
+
+    results: Optional[str] = None
+
+    def put_results(result: str) -> None:
+        nonlocal results
+        results = result
+
+    gpts[user_id] = put_results
+
+    await clients[user_id](Mdata(data=knowledge_transfer_data, user_id=user_id))
+
+    start_time = time.time()
+    while time.time() - start_time < 30:
+        if results is not None:
+            return results
+        await asyncio.sleep(0.1)
+
+    raise fastapi.HTTPException(status_code=500, detail="Timeout error")
+
+
 app.mount("/static", StaticFiles(directory="static"), name="static")
 
 
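
Aside (not part of the diff): the new endpoint mirrors the existing initialize relay route, forwarding the payload to the registered client and waiting up to 30 seconds for its reply. A minimal client-side sketch; the relay host and both ids are placeholders, and any HTTP client would do:

    import requests

    payload = {
        "user_id": "123e4567-e89b-12d3-a456-426614174000",  # placeholder UUID of a registered client
        "id": "t-1234",
        "project_root_path": "/home/user/project",
        "objective": "Fix the failing unit tests",
        "all_user_instructions": "",
        "current_status_of_the_task": "",
        "all_issues_snippets": "",
        "relevant_file_paths": [],
        "build_and_development_instructions": "",
    }

    # A 500 "Timeout error" comes back if the client does not answer within 30 seconds.
    resp = requests.post("https://relay.example.com/v1/knowledge_transfer", json=payload, timeout=35)
    print(resp.text)
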
wcgw/types_.py CHANGED
@@ -59,6 +59,7 @@ class FileEdit(BaseModel):
 class Initialize(BaseModel):
     any_workspace_path: str
     initial_files_to_read: list[str]
+    task_id_to_resume: str
 
 
 class GetScreenInfo(BaseModel):
@@ -98,3 +99,14 @@ class Mouse(BaseModel):
 class Keyboard(BaseModel):
     action: Literal["key", "type"]
     text: str
+
+
+class KnowledgeTransfer(BaseModel):
+    id: str
+    project_root_path: str
+    objective: str
+    all_user_instructions: str
+    current_status_of_the_task: str
+    all_issues_snippets: str
+    relevant_file_paths: list[str]
+    build_and_development_instructions: str
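
Aside (not part of the diff): both models are plain BaseModels without defaults, so task_id_to_resume becomes a required field on Initialize; presumably an empty string is passed when there is no saved task to resume, matching the `if task_id_to_resume:` check in tools.py. A short construction sketch with placeholder values:

    from wcgw.types_ import Initialize

    fresh = Initialize(
        any_workspace_path="/home/user/project",        # placeholder path
        initial_files_to_read=["/home/user/project/README.md"],
        task_id_to_resume="",                           # no saved task to resume
    )

    resumed = Initialize(
        any_workspace_path="",
        initial_files_to_read=[],
        task_id_to_resume="t-1234",                     # id used in an earlier KnowledgeTransfer call
    )
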
wcgw-2.6.2.dist-info/METADATA → wcgw-2.7.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: wcgw
-Version: 2.6.2
+Version: 2.7.0
 Summary: Shell and coding agent on claude and chatgpt
 Project-URL: Homepage, https://github.com/rusiaaman/wcgw
 Author-email: Aman Rusia <gapypi@arcfu.com>
wcgw-2.6.2.dist-info/RECORD → wcgw-2.7.0.dist-info/RECORD CHANGED
@@ -1,25 +1,28 @@
 wcgw/__init__.py,sha256=9K2QW7QuSLhMTVbKbBYd9UUp-ZyrfBrxcjuD_xk458k,118
-wcgw/types_.py,sha256=pi976GyRLBKeCcpqNmVm5tI0iXO4okkkUGDa41ITr50,1792
+wcgw/types_.py,sha256=qSl3h1D2mAcKmW3jzEmD-jdMYP1DE1G3f5RzIEcXuHY,2090
 wcgw/client/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 wcgw/client/__main__.py,sha256=wcCrL4PjG51r5wVKqJhcoJPTLfHW0wNbD31DrUN0MWI,28
-wcgw/client/anthropic_client.py,sha256=kmS93LEVwXCppfbgCsfWe_SVmTAtsI3x8PrP8DJDYMY,21041
+wcgw/client/anthropic_client.py,sha256=U9uE9mBiaq01FnxFC6cBIIAeSpO16UxrZJKeVGqItiA,22135
 wcgw/client/cli.py,sha256=-z0kpDAW3mzfQrQeZfaVJhBCAQY3HXnt9GdgQ8s-u0Y,1003
 wcgw/client/common.py,sha256=OCH7Tx64jojz3M3iONUrGMadE07W21DiZs5sOxWX1Qc,1456
 wcgw/client/computer_use.py,sha256=35NKAlMrxwD0TBlMMRnbCwz4g8TBRGOlcy-cmS-yJ_A,15247
-wcgw/client/diff-instructions.txt,sha256=s5AJKG23JsjwRYhFZFQVvwDpF67vElawrmdXwvukR1A,1683
-wcgw/client/openai_client.py,sha256=AP0B-fKNNlYCAEL0MrT3UxeCcvca7jJZp9kYkinUqSM,17706
+wcgw/client/diff-instructions.txt,sha256=tmJ9Fu9XdO_72lYXQQNY9RZyx91bjxrXJf9d_KBz57k,1611
+wcgw/client/memory.py,sha256=wdsd_czb01XM6Gu80HRsLM-Ll5gJ4E7UEnkneh5sWFI,1704
+wcgw/client/openai_client.py,sha256=ONTbQAHvU2O0HxmYlqbVjXwsf4rvHfQvnNKSUEu4mXc,19013
 wcgw/client/openai_utils.py,sha256=KfMB1-p2zDiA7pPWwAVarochf7-qeL1UMgtlDV9DtKA,2662
 wcgw/client/sys_utils.py,sha256=GajPntKhaTUMn6EOmopENWZNR2G_BJyuVbuot0x6veI,1376
-wcgw/client/tools.py,sha256=JU-bZSJlqYapguXqA2sTamgTefmMYP3HpUTI2yXoqKU,48043
+wcgw/client/tools.py,sha256=TM2wBGoi9C1BmbDY1GSl-T4T1soPBDuNm1PwmF1BlqA,44606
+wcgw/client/file_ops/diff_edit.py,sha256=o0ucArVwn5p6QTDgYsjLfMy4TJXxUG3qcppFBNF3bbQ,16751
+wcgw/client/file_ops/search_replace.py,sha256=89ieDC9fTsIKPDx7CJwnwpX32dRdSlMKoBtKVXc7VWI,3971
 wcgw/client/mcp_server/Readme.md,sha256=I8N4dHkTUVGNQ63BQkBMBhCCBTgqGOSF_pUR6iOEiUk,2495
 wcgw/client/mcp_server/__init__.py,sha256=hyPPwO9cabAJsOMWhKyat9yl7OlSmIobaoAZKHu3DMc,381
-wcgw/client/mcp_server/server.py,sha256=bhswcJQt2jWIwVcQRrEaa9E6LkuE_vK7fNG51bsV4hw,12414
+wcgw/client/mcp_server/server.py,sha256=yi5PXSa1FFam3BtkFKgvda6QcTcErFP7vDCRNQtyCl0,13883
 wcgw/client/repo_ops/display_tree.py,sha256=5FD4hfMkM2cIZnXlu7WfJswJLthj0SkuHlkGH6dpWQU,4632
 wcgw/client/repo_ops/path_prob.py,sha256=SWf0CDn37rtlsYRQ51ufSxay-heaQoVIhr1alB9tZ4M,2144
 wcgw/client/repo_ops/paths_model.vocab,sha256=M1pXycYDQehMXtpp-qAgU7rtzeBbCOiJo4qcYFY0kqk,315087
 wcgw/client/repo_ops/paths_tokens.model,sha256=jiwwE4ae8ADKuTZISutXuM5Wfyc_FBmN5rxTjoNnCos,1569052
 wcgw/client/repo_ops/repo_context.py,sha256=5NqRxBY0K-SBFXJ0Ybt7llzYOBD8pRkTpruMMJHWxv4,4336
-wcgw/relay/serve.py,sha256=CYY0mAAzR6nXkdGqLA9dXkgBcMCKPXEAmBcDyutUnjQ,8769
+wcgw/relay/serve.py,sha256=dWT2cmmjqEvh6dqGoI0zEpXxl0w7SVts5j1iIl7ycL4,9555
 wcgw/relay/static/privacy.txt,sha256=s9qBdbx2SexCpC_z33sg16TptmAwDEehMCLz4L50JLc,529
 mcp_wcgw/__init__.py,sha256=fKCgOdN7cn7gR3YGFaGyV5Goe8A2sEyllLcsRkN0i-g,2601
 mcp_wcgw/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -43,8 +46,8 @@ mcp_wcgw/shared/memory.py,sha256=dBsOghxHz8-tycdSVo9kSujbsC8xb_tYsGmuJobuZnw,281
 mcp_wcgw/shared/progress.py,sha256=ymxOsb8XO5Mhlop7fRfdbmvPodANj7oq6O4dD0iUcnw,1048
 mcp_wcgw/shared/session.py,sha256=e44a0LQOW8gwdLs9_DE9oDsxqW2U8mXG3d5KT95bn5o,10393
 mcp_wcgw/shared/version.py,sha256=d2LZii-mgsPIxpshjkXnOTUmk98i0DT4ff8VpA_kAvE,111
-wcgw-2.6.2.dist-info/METADATA,sha256=o8ZxaCTFXNBmv11mIMW5t0IS5s0GTYD_Hl7u_bzYZoU,6979
-wcgw-2.6.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-wcgw-2.6.2.dist-info/entry_points.txt,sha256=eKo1omwbAggWlQ0l7GKoR7uV1-j16nk9tK0BhC2Oz_E,120
-wcgw-2.6.2.dist-info/licenses/LICENSE,sha256=BvY8xqjOfc3X2qZpGpX3MZEmF-4Dp0LqgKBbT6L_8oI,11142
-wcgw-2.6.2.dist-info/RECORD,,
+wcgw-2.7.0.dist-info/METADATA,sha256=0qgZ9bqCnkUZNTKWAEbmxBq8xMHRZL-DKmikYxxIWfk,6979
+wcgw-2.7.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+wcgw-2.7.0.dist-info/entry_points.txt,sha256=eKo1omwbAggWlQ0l7GKoR7uV1-j16nk9tK0BhC2Oz_E,120
+wcgw-2.7.0.dist-info/licenses/LICENSE,sha256=BvY8xqjOfc3X2qZpGpX3MZEmF-4Dp0LqgKBbT6L_8oI,11142
+wcgw-2.7.0.dist-info/RECORD,,