wcgw 2.0.0__tar.gz → 2.0.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of wcgw might be problematic.
- {wcgw-2.0.0 → wcgw-2.0.2}/PKG-INFO +14 -7
- {wcgw-2.0.0 → wcgw-2.0.2}/README.md +13 -6
- {wcgw-2.0.0 → wcgw-2.0.2}/pyproject.toml +1 -1
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/client/computer_use.py +13 -4
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/client/tools.py +1 -3
- {wcgw-2.0.0 → wcgw-2.0.2}/uv.lock +1 -1
- {wcgw-2.0.0 → wcgw-2.0.2}/.github/workflows/python-publish.yml +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/.github/workflows/python-tests.yml +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/.github/workflows/python-types.yml +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/.gitignore +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/.python-version +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/.vscode/settings.json +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/gpt_action_json_schema.json +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/gpt_instructions.txt +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/openai.md +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/src/__init__.py +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/__init__.py +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/client/__init__.py +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/client/__main__.py +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/client/anthropic_client.py +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/client/cli.py +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/client/common.py +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/client/diff-instructions.txt +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/client/mcp_server/Readme.md +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/client/mcp_server/__init__.py +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/client/mcp_server/server.py +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/client/openai_client.py +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/client/openai_utils.py +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/client/sys_utils.py +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/relay/serve.py +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/relay/static/privacy.txt +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/types_.py +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/static/claude-ss.jpg +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/static/computer-use.jpg +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/static/example.jpg +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/static/rocket-icon.png +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/static/ss1.png +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/tests/test_basic.py +0 -0
- {wcgw-2.0.0 → wcgw-2.0.2}/tests/test_tools.py +0 -0
{wcgw-2.0.0 → wcgw-2.0.2}/PKG-INFO +14 -7

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: wcgw
-Version: 2.0.0
+Version: 2.0.2
 Summary: What could go wrong giving full shell access to chatgpt?
 Project-URL: Homepage, https://github.com/rusiaaman/wcgw
 Author-email: Aman Rusia <gapypi@arcfu.com>
@@ -27,9 +27,10 @@ Requires-Dist: uvicorn>=0.31.0
 Requires-Dist: websockets>=13.1
 Description-Content-Type: text/markdown
 
-# Shell and Coding agent
+# Shell and Coding agent for Claude and Chatgpt
 
-- An MCP server on claude desktop for autonomous shell, coding and desktop control agent.
+- Claude - An MCP server on claude desktop for autonomous shell, coding and desktop control agent.
+- Chatgpt - Allows custom gpt to talk to your shell via a relay server.
 
 [](https://github.com/rusiaaman/wcgw/actions/workflows/python-tests.yml)
 [](https://github.com/rusiaaman/wcgw/actions/workflows/python-types.yml)
@@ -37,7 +38,7 @@ Description-Content-Type: text/markdown
 
 ## Updates
 
-- [01 Dec 2024]
+- [01 Dec 2024] Removed author hosted relay server for chatgpt.
 
 - [26 Nov 2024] Introduced claude desktop support through mcp
 
@@ -49,7 +50,7 @@ Description-Content-Type: text/markdown
 - ⚡ **Interactive Command Handling**: Supports interactive commands using arrow keys, interrupt, and ansi escape sequences.
 - ⚡ **REPL support**: [beta] Supports python/node and other REPL execution.
 
-## Setup
+## Claude Setup
 
 Update `claude_desktop_config.json` (~/Library/Application Support/Claude/claude_desktop_config.json)
 
@@ -74,7 +75,7 @@ Update `claude_desktop_config.json` (~/Library/Application Support/Claude/claude
 
 Then restart claude app.
 
-
+### [Optional] Computer use support using desktop on docker
 
 Computer use is disabled by default. Add `--computer-use` to enable it. This will add necessary tools to Claude including ScreenShot, Mouse and Keyboard control.
 
@@ -123,7 +124,12 @@ Then ask claude to execute shell commands, read files, edit files, run your code
 
 If you've run the docker for LLM to access, you can ask it to control the "docker os". If you don't provide the docker container id to it, it'll try to search for available docker using `docker ps` command.
 
-
+
+## Chatgpt Setup
+
+Read here: https://github.com/rusiaaman/wcgw/blob/main/openai.md
+
+## Examples
 
 ### Computer use example
 
@@ -133,6 +139,7 @@ If you've run the docker for LLM to access, you can ask it to control the "docke
 
 
 
+
 ## [Optional] Local shell access with openai API key or anthropic API key
 
 ### Openai
```
{wcgw-2.0.0 → wcgw-2.0.2}/README.md +13 -6

```diff
@@ -1,6 +1,7 @@
-# Shell and Coding agent
+# Shell and Coding agent for Claude and Chatgpt
 
-- An MCP server on claude desktop for autonomous shell, coding and desktop control agent.
+- Claude - An MCP server on claude desktop for autonomous shell, coding and desktop control agent.
+- Chatgpt - Allows custom gpt to talk to your shell via a relay server.
 
 [](https://github.com/rusiaaman/wcgw/actions/workflows/python-tests.yml)
 [](https://github.com/rusiaaman/wcgw/actions/workflows/python-types.yml)
@@ -8,7 +9,7 @@
 
 ## Updates
 
-- [01 Dec 2024]
+- [01 Dec 2024] Removed author hosted relay server for chatgpt.
 
 - [26 Nov 2024] Introduced claude desktop support through mcp
 
@@ -20,7 +21,7 @@
 - ⚡ **Interactive Command Handling**: Supports interactive commands using arrow keys, interrupt, and ansi escape sequences.
 - ⚡ **REPL support**: [beta] Supports python/node and other REPL execution.
 
-## Setup
+## Claude Setup
 
 Update `claude_desktop_config.json` (~/Library/Application Support/Claude/claude_desktop_config.json)
 
@@ -45,7 +46,7 @@ Update `claude_desktop_config.json` (~/Library/Application Support/Claude/claude
 
 Then restart claude app.
 
-
+### [Optional] Computer use support using desktop on docker
 
 Computer use is disabled by default. Add `--computer-use` to enable it. This will add necessary tools to Claude including ScreenShot, Mouse and Keyboard control.
 
@@ -94,7 +95,12 @@ Then ask claude to execute shell commands, read files, edit files, run your code
 
 If you've run the docker for LLM to access, you can ask it to control the "docker os". If you don't provide the docker container id to it, it'll try to search for available docker using `docker ps` command.
 
-
+
+## Chatgpt Setup
+
+Read here: https://github.com/rusiaaman/wcgw/blob/main/openai.md
+
+## Examples
 
 ### Computer use example
 
@@ -104,6 +110,7 @@ If you've run the docker for LLM to access, you can ask it to control the "docke
 
 
 
+
 ## [Optional] Local shell access with openai API key or anthropic API key
 
 ### Openai
```
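Both the PKG-INFO and README.md hunks rename "## Setup" to "## Claude Setup" and add a "## Chatgpt Setup" pointer to openai.md; the Claude path amounts to adding a wcgw server entry to `claude_desktop_config.json` and restarting the app. A minimal sketch of that edit follows; the `command`/`args` values are hypothetical placeholders, since the exact server entry is not part of this diff.

```python
# Sketch of the "Claude Setup" step: add a wcgw entry under "mcpServers" in
# claude_desktop_config.json, then restart the Claude app. The command/args
# below are hypothetical placeholders, not taken from this diff; copy the
# exact server entry from the wcgw README.
import json
from pathlib import Path

CONFIG_PATH = (
    Path.home() / "Library" / "Application Support" / "Claude" / "claude_desktop_config.json"
)


def add_wcgw_server(computer_use: bool = False) -> None:
    config = json.loads(CONFIG_PATH.read_text()) if CONFIG_PATH.exists() else {}
    args = ["tool", "run", "wcgw@latest"]  # placeholder args
    if computer_use:
        args.append("--computer-use")  # optional flag described in the diff above
    config.setdefault("mcpServers", {})["wcgw"] = {"command": "uv", "args": args}
    CONFIG_PATH.write_text(json.dumps(config, indent=2))


if __name__ == "__main__":
    add_wcgw_server()
```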
{wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/client/computer_use.py +13 -4

```diff
@@ -245,9 +245,17 @@ class ComputerTool:
             return self.shell(f"{self.xdotool} key -- {text}")
         elif action == "type":
             results: list[ToolResult] = []
-
-
-
+            all_lines = text.splitlines()
+            for i, line in enumerate(all_lines):
+                for chunk in chunks(line, TYPING_GROUP_SIZE):
+                    cmd = f"{self.xdotool} type --delay {TYPING_DELAY_MS} -- {shlex.quote(chunk)}"
+                    results.append(self.shell(cmd, take_screenshot=False))
+                if i < len(all_lines) - 1:
+                    results.append(
+                        self.shell(
+                            f"{self.xdotool} key Return", take_screenshot=False
+                        )
+                    )
             screenshot_base64 = self.screenshot().base64_image
             return ToolResult(
                 output="".join(result.output or "" for result in results),
```
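The rewritten `type` action splits the incoming text on newlines, types each line in `TYPING_GROUP_SIZE`-sized chunks via `xdotool type`, and sends an explicit Return key press between lines. The sketch below reproduces that flow in isolation; `chunks`, `TYPING_GROUP_SIZE`, and `TYPING_DELAY_MS` are local stand-ins for the module's own helpers and constants, and the xdotool commands are only collected, not executed.

```python
# Standalone sketch of the new typing flow (assumed constants; commands are
# collected rather than executed against a real display).
import shlex

TYPING_GROUP_SIZE = 50  # assumed chunk size
TYPING_DELAY_MS = 12    # assumed per-key delay


def chunks(s: str, n: int) -> list[str]:
    """Split s into pieces of at most n characters."""
    return [s[i : i + n] for i in range(0, len(s), n)]


def typing_commands(text: str, xdotool: str = "xdotool") -> list[str]:
    cmds: list[str] = []
    all_lines = text.splitlines()
    for i, line in enumerate(all_lines):
        for chunk in chunks(line, TYPING_GROUP_SIZE):
            cmds.append(f"{xdotool} type --delay {TYPING_DELAY_MS} -- {shlex.quote(chunk)}")
        if i < len(all_lines) - 1:
            cmds.append(f"{xdotool} key Return")  # newline becomes an explicit key press
    return cmds


print("\n".join(typing_commands("print('hello')\nprint('world')")))
```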
```diff
@@ -343,8 +351,9 @@ class ComputerTool:
 
     def shell(self, command: str, take_screenshot: bool = True) -> ToolResult:
         """Run a shell command and return the output, error, and optionally a screenshot."""
+        escaped_command = shlex.quote(command)
         _, stdout, stderr = command_run(
-            f"docker exec {self.docker_image_id}
+            f"docker exec {self.docker_image_id} bash -c {escaped_command}",
         )
         base64_image = None
 
```
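This hunk passes the whole command through `shlex.quote` and runs it via `docker exec ... bash -c`, so spaces, quotes, and pipes inside `command` reach bash as a single argument. A small illustration (the container id is made up):

```python
# Illustration of the quoting change in ComputerTool.shell; the container id
# is a made-up example.
import shlex


def docker_shell_command(docker_image_id: str, command: str) -> str:
    escaped_command = shlex.quote(command)
    return f"docker exec {docker_image_id} bash -c {escaped_command}"


print(docker_shell_command("abc123", "echo 'hello world' | wc -w"))
# docker exec abc123 bash -c 'echo '"'"'hello world'"'"' | wc -w'
```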
{wcgw-2.0.0 → wcgw-2.0.2}/src/wcgw/client/tools.py +1 -3

```diff
@@ -929,9 +929,7 @@ def register_client(server_url: str, client_uuid: str = "") -> None:
     client_version = importlib.metadata.version("wcgw")
     websocket.send(client_version)
 
-    print(
-        f"Connected. Share this user id with the chatbot: {client_uuid} \nLink: https://chatgpt.com/g/g-Us0AAXkRh-wcgw-giving-shell-access"
-    )
+    print(f"Connected. Share this user id with the chatbot: {client_uuid}")
     while True:
         # Wait to receive data from the server
         message = websocket.recv()
```
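For context, `register_client` connects to a relay server over a websocket, reports the installed wcgw version, prints the client uuid that the user shares with the chatbot, and then loops on `recv()` waiting for relayed commands; the only change in this release is that the printed message no longer advertises the author-hosted relay link (see the 01 Dec 2024 update above). Below is a simplified sketch of that handshake against a self-hosted relay; the URL scheme and uuid handling are assumptions, not taken from this diff.

```python
# Simplified sketch of the registration handshake (assumed relay URL scheme);
# the real implementation lives in src/wcgw/client/tools.py.
import importlib.metadata
import uuid

from websockets.sync.client import connect  # wcgw depends on websockets>=13.1


def register_client(server_url: str, client_uuid: str = "") -> None:
    client_uuid = client_uuid or str(uuid.uuid4())
    with connect(f"{server_url}/{client_uuid}") as websocket:  # assumed URL shape
        websocket.send(importlib.metadata.version("wcgw"))
        print(f"Connected. Share this user id with the chatbot: {client_uuid}")
        while True:
            # Wait to receive data from the server
            message = websocket.recv()
            print(message)
```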
The remaining files listed above with +0 -0 are unchanged between 2.0.0 and 2.0.2.