wcgw 1.5.4__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Potentially problematic release: this version of wcgw might be problematic.
- wcgw/client/anthropic_client.py +57 -28
- wcgw/client/computer_use.py +1 -1
- wcgw/client/mcp_server/__init__.py +1 -0
- wcgw/client/tools.py +13 -2
- wcgw-2.0.0.dist-info/METADATA +156 -0
- {wcgw-1.5.4.dist-info → wcgw-2.0.0.dist-info}/RECORD +8 -8
- wcgw-1.5.4.dist-info/METADATA +0 -178
- {wcgw-1.5.4.dist-info → wcgw-2.0.0.dist-info}/WHEEL +0 -0
- {wcgw-1.5.4.dist-info → wcgw-2.0.0.dist-info}/entry_points.txt +0 -0
wcgw/client/anthropic_client.py
CHANGED
@@ -4,7 +4,7 @@ import mimetypes
 from pathlib import Path
 import sys
 import traceback
-from typing import Callable, DefaultDict, Optional, cast
+from typing import Callable, DefaultDict, Optional, cast, Literal
 import anthropic
 from anthropic import Anthropic
 from anthropic.types import (
@@ -110,7 +110,10 @@ def parse_user_message_special(msg: str) -> MessageParam:
                 "type": "image",
                 "source": {
                     "type": "base64",
-                    "media_type":
+                    "media_type": cast(
+                        'Literal["image/jpeg", "image/png", "image/gif", "image/webp"]',
+                        image_type or "image/png",
+                    ),
                     "data": image_b64,
                 },
             }
@@ -360,53 +363,79 @@ System information:
             type_ = chunk.type
             if type_ in {"message_start", "message_stop"}:
                 continue
-            elif type_ == "content_block_start"
+            elif type_ == "content_block_start" and hasattr(
+                chunk, "content_block"
+            ):
                 content_block = chunk.content_block
-                if
+                if (
+                    hasattr(content_block, "type")
+                    and content_block.type == "text"
+                    and hasattr(content_block, "text")
+                ):
                     chunk_str = content_block.text
                     assistant_console.print(chunk_str, end="")
                     full_response += chunk_str
                 elif content_block.type == "tool_use":
-
-
-
-
-
-
-
-
-
+                    if (
+                        hasattr(content_block, "input")
+                        and hasattr(content_block, "name")
+                        and hasattr(content_block, "id")
+                    ):
+                        assert content_block.input == {}
+                        tool_calls.append(
+                            {
+                                "name": str(content_block.name),
+                                "input": str(""),
+                                "done": False,
+                                "id": str(content_block.id),
+                            }
+                        )
                 else:
                     error_console.log(
                         f"Ignoring unknown content block type {content_block.type}"
                     )
-            elif type_ == "content_block_delta":
-
-
-
-
-
+            elif type_ == "content_block_delta" and hasattr(chunk, "delta"):
+                delta = chunk.delta
+                if hasattr(delta, "type"):
+                    delta_type = str(delta.type)
+                    if delta_type == "text_delta" and hasattr(delta, "text"):
+                        chunk_str = delta.text
+                        assistant_console.print(chunk_str, end="")
+                        full_response += chunk_str
+                    elif delta_type == "input_json_delta" and hasattr(
+                        delta, "partial_json"
+                    ):
+                        partial_json = delta.partial_json
+                        if isinstance(tool_calls[-1]["input"], str):
+                            tool_calls[-1]["input"] += partial_json
+                    else:
+                        error_console.log(
+                            f"Ignoring unknown content block delta type {delta_type}"
+                        )
                 else:
-
-                        f"Ignoring unknown content block delta type {chunk.delta.type}"
-                    )
+                    raise ValueError("Content block delta has no type")
             elif type_ == "content_block_stop":
                 if tool_calls and not tool_calls[-1]["done"]:
                     tc = tool_calls[-1]
+                    tool_name = str(tc["name"])
+                    tool_input = str(tc["input"])
+                    tool_id = str(tc["id"])
+
                     tool_parsed = which_tool_name(
-
-                    ).model_validate_json(
+                        tool_name
+                    ).model_validate_json(tool_input)
+
                     system_console.print(
                         f"\n---------------------------------------\n# Assistant invoked tool: {tool_parsed}"
                     )
+
                     _histories.append(
                         {
                             "role": "assistant",
                             "content": [
                                 ToolUseBlockParam(
-                                    id=
-                                    name=
+                                    id=tool_id,
+                                    name=tool_name,
                                     input=tool_parsed.model_dump(),
                                     type="tool_use",
                                 )
@@ -458,7 +487,7 @@ System information:
                     tool_results.append(
                         ToolResultBlockParam(
                             type="tool_result",
-                            tool_use_id=tc["id"],
+                            tool_use_id=str(tc["id"]),
                             content=tool_results_content,
                         )
                     )
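The bulk of this change replaces direct attribute access on Anthropic streaming events with `hasattr` guards, and accumulates a tool call's arguments from `input_json_delta` fragments instead of expecting them on the start event. Below is a minimal standalone sketch of that pattern; the `consume_stream` helper and its event objects are illustrative stand-ins, not wcgw's actual code.

```python
from typing import Any, Iterable


def consume_stream(events: Iterable[Any]) -> tuple[str, list[dict[str, Any]]]:
    """Collect assistant text and tool calls from Anthropic-style stream events."""
    full_response = ""
    tool_calls: list[dict[str, Any]] = []
    for event in events:
        if event.type == "content_block_start" and hasattr(event, "content_block"):
            block = event.content_block
            if getattr(block, "type", None) == "tool_use":
                # Arguments arrive later as partial JSON; start with an empty buffer.
                tool_calls.append({"name": block.name, "id": block.id, "input": ""})
            elif hasattr(block, "text"):
                full_response += block.text
        elif event.type == "content_block_delta" and hasattr(event, "delta"):
            delta = event.delta
            if getattr(delta, "type", None) == "text_delta":
                full_response += delta.text
            elif getattr(delta, "type", None) == "input_json_delta" and tool_calls:
                # Concatenate JSON fragments; they are parsed once the block stops.
                tool_calls[-1]["input"] += delta.partial_json
    return full_response, tool_calls
```

In the actual diff, the accumulated JSON is validated with `which_tool_name(...).model_validate_json(...)` once the corresponding `content_block_stop` event arrives.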
wcgw/client/computer_use.py
CHANGED
@@ -161,7 +161,7 @@ class ComputerTool:
         assert not result.error, result.error
         assert result.output, "Could not get screen info"
         width, height, display_num = map(
-            lambda x: None if not x else int(x), result.output.split(",")
+            lambda x: None if not x else int(x), result.output.strip().split(",")
         )
         if width is None:
             width = 1080
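The one-line fix above strips the command output before splitting it. Without the strip, a trailing newline can leave a whitespace-only final field, which is truthy and makes `int()` raise; after stripping, an empty trailing field maps cleanly to `None`. A small illustration (the helper name and sample string are invented for the example):

```python
def parse_screen_info(output: str) -> tuple[int | None, ...]:
    # Mirrors the fixed lambda: strip trailing whitespace, then split on commas.
    return tuple(None if not x else int(x) for x in output.strip().split(","))


print(parse_screen_info("1920,1080,\n"))  # (1920, 1080, None)
# Without .strip(), the last field would be "\n" and int("\n") would raise ValueError.
```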
wcgw/client/tools.py
CHANGED
@@ -962,7 +962,7 @@ run = Typer(pretty_exceptions_show_locals=False, no_args_is_help=True)
 
 @run.command()
 def app(
-    server_url: str = "
+    server_url: str = "",
     client_uuid: Optional[str] = None,
     version: bool = typer.Option(False, "--version", "-v"),
 ) -> None:
@@ -970,7 +970,18 @@ def app(
         version_ = importlib.metadata.version("wcgw")
         print(f"wcgw version: {version_}")
        exit()
-
+    if not server_url:
+        server_url = os.environ.get("WCGW_RELAY_SERVER", "")
+        if not server_url:
+            print(
+                "Error: Please provide relay server url using --server_url or WCGW_RELAY_SERVER environment variable"
+            )
+            print(
+                "\tNOTE: you need to run a relay server first, author doesn't host a relay server anymore."
+            )
+            print("\thttps://github.com/rusiaaman/wcgw/blob/main/openai.md")
+            print("\tExample `--server-url=ws://localhost:8000/v1/register`")
+            raise typer.Exit(1)
     register_client(server_url, client_uuid or "")
 
 
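The tools.py change makes the relay URL optional on the command line and falls back to the `WCGW_RELAY_SERVER` environment variable, exiting with pointers to the relay-server docs when neither is set. Here is a minimal sketch of the same flag-or-env-var pattern, using a hypothetical `serve` command rather than wcgw's real `app` entry point:

```python
import os

import typer

run = typer.Typer()


@run.command()
def serve(server_url: str = "") -> None:
    # Prefer the CLI flag; otherwise fall back to the environment variable.
    if not server_url:
        server_url = os.environ.get("WCGW_RELAY_SERVER", "")
        if not server_url:
            typer.echo("Provide --server-url or set WCGW_RELAY_SERVER")
            raise typer.Exit(1)
    typer.echo(f"Registering against {server_url}")


if __name__ == "__main__":
    run()
```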
wcgw-2.0.0.dist-info/METADATA
ADDED
@@ -0,0 +1,156 @@
+Metadata-Version: 2.3
+Name: wcgw
+Version: 2.0.0
+Summary: What could go wrong giving full shell access to chatgpt?
+Project-URL: Homepage, https://github.com/rusiaaman/wcgw
+Author-email: Aman Rusia <gapypi@arcfu.com>
+Requires-Python: <3.13,>=3.11
+Requires-Dist: anthropic>=0.39.0
+Requires-Dist: fastapi>=0.115.0
+Requires-Dist: mcp
+Requires-Dist: mypy>=1.11.2
+Requires-Dist: nltk>=3.9.1
+Requires-Dist: openai>=1.46.0
+Requires-Dist: petname>=2.6
+Requires-Dist: pexpect>=4.9.0
+Requires-Dist: pydantic>=2.9.2
+Requires-Dist: pyte>=0.8.2
+Requires-Dist: python-dotenv>=1.0.1
+Requires-Dist: rich>=13.8.1
+Requires-Dist: semantic-version>=2.10.0
+Requires-Dist: shell>=1.0.1
+Requires-Dist: tiktoken==0.7.0
+Requires-Dist: toml>=0.10.2
+Requires-Dist: typer>=0.12.5
+Requires-Dist: types-pexpect>=4.9.0.20240806
+Requires-Dist: uvicorn>=0.31.0
+Requires-Dist: websockets>=13.1
+Description-Content-Type: text/markdown
+
+# Shell and Coding agent on Claude desktop app
+
+- An MCP server on claude desktop for autonomous shell, coding and desktop control agent.
+
+[](https://github.com/rusiaaman/wcgw/actions/workflows/python-tests.yml)
+[](https://github.com/rusiaaman/wcgw/actions/workflows/python-types.yml)
+[](https://github.com/rusiaaman/wcgw/actions/workflows/python-publish.yml)
+
+## Updates
+
+- [01 Dec 2024] Deprecated chatgpt app support
+
+- [26 Nov 2024] Introduced claude desktop support through mcp
+
+## 🚀 Highlights
+
+- ⚡ **Full Shell Access**: No restrictions, complete control.
+- ⚡ **Desktop control on Claude**: Screen capture, mouse control, keyboard control on claude desktop (on mac with docker linux)
+- ⚡ **Create, Execute, Iterate**: Ask claude to keep running compiler checks till all errors are fixed, or ask it to keep checking for the status of a long running command till it's done.
+- ⚡ **Interactive Command Handling**: Supports interactive commands using arrow keys, interrupt, and ansi escape sequences.
+- ⚡ **REPL support**: [beta] Supports python/node and other REPL execution.
+
+## Setup
+
+Update `claude_desktop_config.json` (~/Library/Application Support/Claude/claude_desktop_config.json)
+
+```json
+{
+  "mcpServers": {
+    "wcgw": {
+      "command": "uv",
+      "args": [
+        "tool",
+        "run",
+        "--from",
+        "wcgw@latest",
+        "--python",
+        "3.12",
+        "wcgw_mcp"
+      ]
+    }
+  }
+}
+```
+
+Then restart claude app.
+
+## [Optional] Computer use support using desktop on docker
+
+Computer use is disabled by default. Add `--computer-use` to enable it. This will add necessary tools to Claude including ScreenShot, Mouse and Keyboard control.
+
+```json
+{
+  "mcpServers": {
+    "wcgw": {
+      "command": "uv",
+      "args": [
+        "tool",
+        "run",
+        "--from",
+        "wcgw@latest",
+        "--python",
+        "3.12",
+        "wcgw_mcp",
+        "--computer-use"
+      ]
+    }
+  }
+}
+```
+
+Claude will be able to connect to any docker container with linux environment. Native system control isn't supported outside docker.
+
+You'll need to run a docker image with desktop and optional VNC connection. Here's a demo image:
+
+```sh
+docker run -p 6080:6080 ghcr.io/anthropics/anthropic-quickstarts:computer-use-demo-latest
+```
+
+Then ask claude desktop app to control the docker os. It'll connect to the docker container and control it.
+
+Connect to `http://localhost:6080/vnc.html` for desktop view (VNC) of the system running in the docker.
+
+## Usage
+
+Wait for a few seconds. You should be able to see this icon if everything goes right.
+
+
+over here
+
+
+
+Then ask claude to execute shell commands, read files, edit files, run your code, etc.
+
+If you've run the docker for LLM to access, you can ask it to control the "docker os". If you don't provide the docker container id to it, it'll try to search for available docker using `docker ps` command.
+
+## Example
+
+### Computer use example
+
+
+
+### Shell example
+
+
+
+## [Optional] Local shell access with openai API key or anthropic API key
+
+### Openai
+
+Add `OPENAI_API_KEY` and `OPENAI_ORG_ID` env variables.
+
+Then run
+
+`uvx --from wcgw@latest wcgw_local --limit 0.1` # Cost limit $0.1
+
+You can now directly write messages or press enter key to open vim for multiline message and text pasting.
+
+### Anthropic
+
+Add `ANTHROPIC_API_KEY` env variable.
+
+Then run
+
+`uvx --from wcgw@latest wcgw_local --claude`
+
+You can now directly write messages or press enter key to open vim for multiline message and text pasting.
{wcgw-1.5.4.dist-info → wcgw-2.0.0.dist-info}/RECORD
CHANGED
@@ -2,21 +2,21 @@ wcgw/__init__.py,sha256=9K2QW7QuSLhMTVbKbBYd9UUp-ZyrfBrxcjuD_xk458k,118
 wcgw/types_.py,sha256=rDz4olJS2zvYC13jzeOppA2tci-tVDyWAqeA5BesAaU,1773
 wcgw/client/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 wcgw/client/__main__.py,sha256=wcCrL4PjG51r5wVKqJhcoJPTLfHW0wNbD31DrUN0MWI,28
-wcgw/client/anthropic_client.py,sha256=
+wcgw/client/anthropic_client.py,sha256=yhFavV51c7zl2AqWQS2i-4KtLh4BRI-odFlppg5sIDY,19965
 wcgw/client/cli.py,sha256=-z0kpDAW3mzfQrQeZfaVJhBCAQY3HXnt9GdgQ8s-u0Y,1003
 wcgw/client/common.py,sha256=grH-yV_4tnTQZ29xExn4YicGLxEq98z-HkEZwH0ReSg,1410
-wcgw/client/computer_use.py,sha256=
+wcgw/client/computer_use.py,sha256=7m_EMdyvJXZz5L0sVJA9zN2pQNtTkiQE__ie3qsvfG8,14878
 wcgw/client/diff-instructions.txt,sha256=s5AJKG23JsjwRYhFZFQVvwDpF67vElawrmdXwvukR1A,1683
 wcgw/client/openai_client.py,sha256=F5TEv5DhU9twsywSZGtuVkPo6xVaaoaEjvIh88FnIUQ,17780
 wcgw/client/openai_utils.py,sha256=YNwCsA-Wqq7jWrxP0rfQmBTb1dI0s7dWXzQqyTzOZT4,2629
 wcgw/client/sys_utils.py,sha256=GajPntKhaTUMn6EOmopENWZNR2G_BJyuVbuot0x6veI,1376
-wcgw/client/tools.py,sha256=
+wcgw/client/tools.py,sha256=DQ5oRhaXkNDMQccbM59tyMjL76VIyihM5rgkvN-_2B4,33237
 wcgw/client/mcp_server/Readme.md,sha256=I8N4dHkTUVGNQ63BQkBMBhCCBTgqGOSF_pUR6iOEiUk,2495
-wcgw/client/mcp_server/__init__.py,sha256=
+wcgw/client/mcp_server/__init__.py,sha256=hyPPwO9cabAJsOMWhKyat9yl7OlSmIobaoAZKHu3DMc,381
 wcgw/client/mcp_server/server.py,sha256=M9pJ3DktGsxf6cufXbZ0xxs0HIKNLGc75O_biV2UKYA,10571
 wcgw/relay/serve.py,sha256=RUcUeyL4Xt0EEo12Ul6VQjb4tRle4uIdsa85v7XXxEw,8771
 wcgw/relay/static/privacy.txt,sha256=s9qBdbx2SexCpC_z33sg16TptmAwDEehMCLz4L50JLc,529
-wcgw-
-wcgw-
-wcgw-
-wcgw-
+wcgw-2.0.0.dist-info/METADATA,sha256=oJwWfTlEZnRw470t40fgZMdVRfS5qSstxBgDSISWEb4,5054
+wcgw-2.0.0.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
+wcgw-2.0.0.dist-info/entry_points.txt,sha256=eKo1omwbAggWlQ0l7GKoR7uV1-j16nk9tK0BhC2Oz_E,120
+wcgw-2.0.0.dist-info/RECORD,,
wcgw-1.5.4.dist-info/METADATA
DELETED
@@ -1,178 +0,0 @@
-Metadata-Version: 2.3
-Name: wcgw
-Version: 1.5.4
-Summary: What could go wrong giving full shell access to chatgpt?
-Project-URL: Homepage, https://github.com/rusiaaman/wcgw
-Author-email: Aman Rusia <gapypi@arcfu.com>
-Requires-Python: <3.13,>=3.11
-Requires-Dist: anthropic>=0.39.0
-Requires-Dist: fastapi>=0.115.0
-Requires-Dist: mcp
-Requires-Dist: mypy>=1.11.2
-Requires-Dist: nltk>=3.9.1
-Requires-Dist: openai>=1.46.0
-Requires-Dist: petname>=2.6
-Requires-Dist: pexpect>=4.9.0
-Requires-Dist: pydantic>=2.9.2
-Requires-Dist: pyte>=0.8.2
-Requires-Dist: python-dotenv>=1.0.1
-Requires-Dist: rich>=13.8.1
-Requires-Dist: semantic-version>=2.10.0
-Requires-Dist: shell>=1.0.1
-Requires-Dist: tiktoken==0.7.0
-Requires-Dist: toml>=0.10.2
-Requires-Dist: typer>=0.12.5
-Requires-Dist: types-pexpect>=4.9.0.20240806
-Requires-Dist: uvicorn>=0.31.0
-Requires-Dist: websockets>=13.1
-Description-Content-Type: text/markdown
-
-# Shell and Coding agent on Chatgpt and Claude desktop apps
-
-- An MCP server on claude desktop for autonomous shell, coding and desktop control agent.
-- A custom gpt on chatgpt web/desktop apps to interact with your local shell, edit files, run code, etc.
-
-
-[](https://github.com/rusiaaman/wcgw/actions/workflows/python-tests.yml)
-[](https://github.com/rusiaaman/wcgw/actions/workflows/python-publish.yml)
-
-[New feature] [26-Nov-2024] Claude desktop support for shell, computer-control, coding agent.
-[src/wcgw/client/mcp_server/Readme.md](src/wcgw/client/mcp_server/Readme.md)
-
-### 🚀 Highlights
-
-- ⚡ **Full Shell Access**: No restrictions, complete control.
-- ⚡ **Desktop control on Claude**: Screen capture, mouse control, keyboard control on claude desktop (on mac with docker linux)
-- ⚡ **Create, Execute, Iterate**: Ask the gpt to keep running compiler checks till all errors are fixed, or ask it to keep checking for the status of a long running command till it's done.
-- ⚡ **Interactive Command Handling**: Supports interactive commands using arrow keys, interrupt, and ansi escape sequences.
-- ⚡ **REPL support**: [beta] Supports python/node and other REPL execution.
-
-## Claude
-Full readme [src/wcgw/client/mcp_server/Readme.md](src/wcgw/client/mcp_server/Readme.md)
-### Setup
-
-Update `claude_desktop_config.json`
-
-```json
-{
-  "mcpServers": {
-    "wcgw": {
-      "command": "uvx",
-      "args": ["--from", "wcgw@latest", "wcgw_mcp"]
-    }
-  }
-}
-```
-
-Then restart claude app.
-You can then ask claude to execute shell commands, read files, edit files, run your code, etc.
-
-## ChatGPT
-
-### 🪜 Steps:
-
-1. Run the [cli client](https://github.com/rusiaaman/wcgw?tab=readme-ov-file#client) in any directory of choice.
-2. Share the generated id with this GPT: `https://chatgpt.com/g/g-Us0AAXkRh-wcgw-giving-shell-access`
-3. The custom GPT can now run any command on your cli
-
-### Client
-
-You need to keep running this client for GPT to access your shell. Run it in a version controlled project's root.
-
-#### Option 1: using uv [Recommended]
-
-```sh
-$ curl -LsSf https://astral.sh/uv/install.sh | sh
-$ uvx wcgw@latest
-```
-
-#### Option 2: using pip
-
-Supports python >=3.10 and <3.13
-
-```sh
-$ pip3 install wcgw
-$ wcgw
-```
-
-This will print a UUID that you need to share with the gpt.
-
-### Chat
-
-Open the following link or search the "wcgw" custom gpt using "Explore GPTs" on chatgpt.com
-
-https://chatgpt.com/g/g-Us0AAXkRh-wcgw-giving-shell-access
-
-Finally, let the chatgpt know your user id in any format. E.g., "user_id=<your uuid>" followed by rest of your instructions.
-
-NOTE: you can resume a broken connection
-`wcgw --client-uuid $previous_uuid`
-
-### How it works on chatgpt app?
-
-Your commands are relayed through a server to the terminal client. [You could host the server on your own](https://github.com/rusiaaman/wcgw?tab=readme-ov-file#creating-your-own-custom-gpt-and-the-relay-server). For public convenience I've hosted one at https://wcgw.arcfu.com thanks to the gcloud free tier plan.
-
-Chatgpt sends a request to the relay server using the user id that you share with it. The relay server holds a websocket with the terminal client against the user id and acts as a proxy to pass the request.
-
-It's secure in both the directions. Either a malicious actor or a malicious Chatgpt has to correctly guess your UUID for any security breach.
-
-# Showcase
-
-## Claude desktop
-
-### Resize image and move it to a new dir
-
-
-
-## Chatgpt app
-
-### Unit tests and github actions
-
-[The first version of unit tests and github workflow to test on multiple python versions were written by the custom chatgpt](https://chatgpt.com/share/6717f922-8998-8005-b825-45d4b348b4dd)
-
-### Create a todo app using react + typescript + vite
-
-
-
-# Privacy
-
-The relay server doesn't store any data. I can't access any information passing through it and only secure channels are used to communicate.
-
-You may host the server on your own and create a custom gpt using the following section.
-
-# Creating your own custom gpt and the relay server.
-
-I've used the following instructions and action json schema to create the custom GPT. (Replace wcgw.arcfu.com with the address to your server)
-
-https://github.com/rusiaaman/wcgw/blob/main/gpt_instructions.txt
-https://github.com/rusiaaman/wcgw/blob/main/gpt_action_json_schema.json
-
-Run the server
-`gunicorn --worker-class uvicorn.workers.UvicornWorker --bind 0.0.0.0:443 src.wcgw.relay.serve:app --certfile fullchain.pem --keyfile privkey.pem`
-
-If you don't have public ip and domain name, you can use `ngrok` or similar services to get a https address to the api.
-
-The specify the server url in the `wcgw` command like so
-`wcgw --server-url https://your-url/v1/register`
-
-# [Optional] Local shell access with openai API key or anthropic API key
-
-## Openai
-
-Add `OPENAI_API_KEY` and `OPENAI_ORG_ID` env variables.
-
-Then run
-
-`uvx --from wcgw@latest wcgw_local --limit 0.1` # Cost limit $0.1
-
-You can now directly write messages or press enter key to open vim for multiline message and text pasting.
-
-## Anthropic
-
-Add `ANTHROPIC_API_KEY` env variable.
-
-Then run
-
-`uvx --from wcgw@latest wcgw_local --claude`
-
-You can now directly write messages or press enter key to open vim for multiline message and text pasting.
{wcgw-1.5.4.dist-info → wcgw-2.0.0.dist-info}/WHEEL
File without changes
{wcgw-1.5.4.dist-info → wcgw-2.0.0.dist-info}/entry_points.txt
File without changes