letswork 2.0.4__tar.gz → 2.0.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. {letswork-2.0.4 → letswork-2.0.6}/PKG-INFO +1 -1
  2. {letswork-2.0.4 → letswork-2.0.6}/letswork/cli.py +15 -8
  3. letswork-2.0.6/letswork/proxy.py +109 -0
  4. letswork-2.0.6/letswork/tunnel.py +52 -0
  5. {letswork-2.0.4 → letswork-2.0.6}/pyproject.toml +1 -1
  6. letswork-2.0.4/letswork/proxy.py +0 -130
  7. letswork-2.0.4/letswork/remote_client.py +0 -115
  8. letswork-2.0.4/letswork/tui/__init__.py +0 -1
  9. letswork-2.0.4/letswork/tui/app.py +0 -354
  10. letswork-2.0.4/letswork/tui/approval_panel.py +0 -78
  11. letswork-2.0.4/letswork/tui/chat.py +0 -54
  12. letswork-2.0.4/letswork/tui/chat_app.py +0 -145
  13. letswork-2.0.4/letswork/tui/file_tree.py +0 -191
  14. letswork-2.0.4/letswork/tui/file_viewer.py +0 -184
  15. letswork-2.0.4/letswork/tunnel.py +0 -37
  16. {letswork-2.0.4 → letswork-2.0.6}/.github/workflows/ci.yml +0 -0
  17. {letswork-2.0.4 → letswork-2.0.6}/.github/workflows/publish.yml +0 -0
  18. {letswork-2.0.4 → letswork-2.0.6}/.gitignore +0 -0
  19. {letswork-2.0.4 → letswork-2.0.6}/README.md +0 -0
  20. {letswork-2.0.4 → letswork-2.0.6}/docs/architecture.md +0 -0
  21. {letswork-2.0.4 → letswork-2.0.6}/docs/spec.md +0 -0
  22. {letswork-2.0.4 → letswork-2.0.6}/docs/tasks.md +0 -0
  23. {letswork-2.0.4 → letswork-2.0.6}/letswork/__init__.py +0 -0
  24. {letswork-2.0.4 → letswork-2.0.6}/letswork/approval.py +0 -0
  25. {letswork-2.0.4 → letswork-2.0.6}/letswork/auth.py +0 -0
  26. {letswork-2.0.4 → letswork-2.0.6}/letswork/events.py +0 -0
  27. {letswork-2.0.4 → letswork-2.0.6}/letswork/filelock.py +0 -0
  28. {letswork-2.0.4 → letswork-2.0.6}/letswork/launcher.py +0 -0
  29. {letswork-2.0.4 → letswork-2.0.6}/letswork/server.py +0 -0
  30. {letswork-2.0.4 → letswork-2.0.6}/server.json +0 -0
  31. {letswork-2.0.4 → letswork-2.0.6}/tests/__init__.py +0 -0
  32. {letswork-2.0.4 → letswork-2.0.6}/tests/test_auth.py +0 -0
  33. {letswork-2.0.4 → letswork-2.0.6}/tests/test_filelock.py +0 -0
  34. {letswork-2.0.4 → letswork-2.0.6}/tests/test_server.py +0 -0
  35. {letswork-2.0.4 → letswork-2.0.6}/tests/test_tunnel.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: letswork
3
- Version: 2.0.4
3
+ Version: 2.0.6
4
4
  Summary: Real-time collaborative coding via MCP — two developers, one codebase
5
5
  Author: Sai Charan Rajoju
6
6
  License-Expression: MIT
@@ -37,10 +37,22 @@ def start(port, debug):
37
37
  server_module.approval_queue = approval_queue
38
38
 
39
39
  def _on_approved(change):
40
+ # Release the lock so the file can be written to again
41
+ rel = os.path.relpath(
42
+ os.path.abspath(os.path.join(project_root, change.path)),
43
+ os.path.abspath(project_root),
44
+ )
45
+ server_module.lock_manager.release_lock(rel, change.user_id)
40
46
  event_log.emit(EventType.FILE_WRITE, change.user_id,
41
47
  {"path": change.path, "status": "approved"})
42
48
 
43
49
  def _on_rejected(change):
50
+ # Release the lock so the guest can retry or move on
51
+ rel = os.path.relpath(
52
+ os.path.abspath(os.path.join(project_root, change.path)),
53
+ os.path.abspath(project_root),
54
+ )
55
+ server_module.lock_manager.release_lock(rel, change.user_id)
44
56
  event_log.emit(EventType.FILE_WRITE, change.user_id,
45
57
  {"path": change.path, "status": "rejected"})
46
58
 
@@ -148,14 +160,9 @@ def start(port, debug):
148
160
  click.echo(f" [{ts}] 🔌 {event.user_id} connected")
149
161
  elif event.event_type == EventType.ERROR:
150
162
  click.echo(f" [{ts}] ⚠️ {event.data.get('error', '?')}")
151
- elif event.event_type == EventType.PING:
152
- click.echo(f" [{ts}] 🏓 {event.user_id} pinged")
153
- if debug:
154
- if event.event_type not in (
155
- EventType.FILE_WRITE, EventType.CONNECTION,
156
- EventType.ERROR, EventType.PING,
157
- ):
158
- click.echo(f" [{ts}] [debug] {event.event_type.value} — {event.data}")
163
+ elif debug:
164
+ # Debug-only events (ping, file reads, locks, etc.)
165
+ click.echo(f" [{ts}] [debug] {event.event_type.value} by {event.user_id} — {event.data}")
159
166
 
160
167
  event_log.on_event(_notify)
161
168
 
@@ -0,0 +1,109 @@
1
+ """
2
+ LetsWork stdio MCP proxy.
3
+
4
+ Claude Code connects to this as a stdio MCP server (reliable, no streaming issues).
5
+ This proxy forwards all tool calls to the host's HTTP MCP server over Cloudflare.
6
+
7
+ Usage (done automatically by `letswork join`):
8
+ claude mcp add letswork -- letswork-proxy --url <URL> --token <TOKEN>
9
+ """
10
+ import sys
11
+ import asyncio
12
+ import argparse
13
+ import httpx
14
+ from mcp.server import Server
15
+ from mcp.server.stdio import stdio_server
16
+ from mcp import types
17
+
18
+
19
+ def make_proxy_server(base_url: str, token: str) -> Server:
20
+ """Create an MCP Server that forwards calls to the remote host."""
21
+ # Ensure URL ends with /mcp
22
+ url = base_url.rstrip("/")
23
+ if not url.endswith("/mcp"):
24
+ url = url + "/mcp"
25
+
26
+ server = Server("letswork-proxy")
27
+
28
+ async def _http_post(payload: dict) -> dict:
29
+ async with httpx.AsyncClient(timeout=30) as client:
30
+ r = await client.post(
31
+ url,
32
+ json=payload,
33
+ headers={
34
+ "Content-Type": "application/json",
35
+ "Accept": "application/json, text/event-stream",
36
+ },
37
+ )
38
+ r.raise_for_status()
39
+ # Parse SSE response: find 'data: {...}' line
40
+ for line in r.text.splitlines():
41
+ if line.startswith("data: "):
42
+ import json
43
+ return json.loads(line[6:])
44
+ raise RuntimeError("No data in response")
45
+
46
+ @server.list_tools()
47
+ async def list_tools() -> list[types.Tool]:
48
+ resp = await _http_post({
49
+ "jsonrpc": "2.0", "id": 1,
50
+ "method": "tools/list", "params": {},
51
+ })
52
+ tools = []
53
+ for t in resp.get("result", {}).get("tools", []):
54
+ schema = t.get("inputSchema", {"type": "object", "properties": {}})
55
+ # Strip 'token' from schema — proxy injects it automatically
56
+ schema = dict(schema)
57
+ props = dict(schema.get("properties", {}))
58
+ props.pop("token", None)
59
+ schema["properties"] = props
60
+ required = [r for r in schema.get("required", []) if r != "token"]
61
+ if required:
62
+ schema["required"] = required
63
+ elif "required" in schema:
64
+ del schema["required"]
65
+ tools.append(types.Tool(
66
+ name=t["name"],
67
+ description=t.get("description", ""),
68
+ inputSchema=schema,
69
+ ))
70
+ return tools
71
+
72
+ @server.call_tool()
73
+ async def call_tool(name: str, arguments: dict) -> list[types.TextContent]:
74
+ # Inject token automatically so guest doesn't have to think about it
75
+ arguments = {**arguments, "token": token}
76
+ resp = await _http_post({
77
+ "jsonrpc": "2.0", "id": 1,
78
+ "method": "tools/call",
79
+ "params": {"name": name, "arguments": arguments},
80
+ })
81
+ result = resp.get("result", {})
82
+ content = result.get("content", [])
83
+ out = []
84
+ for item in content:
85
+ if item.get("type") == "text":
86
+ out.append(types.TextContent(type="text", text=item["text"]))
87
+ if not out:
88
+ out.append(types.TextContent(type="text", text=str(result)))
89
+ return out
90
+
91
+ return server
92
+
93
+
94
+ async def _run(url: str, token: str) -> None:
95
+ server = make_proxy_server(url, token)
96
+ async with stdio_server() as (read_stream, write_stream):
97
+ await server.run(read_stream, write_stream, server.create_initialization_options())
98
+
99
+
100
+ def main() -> None:
101
+ parser = argparse.ArgumentParser(description="LetsWork MCP stdio proxy")
102
+ parser.add_argument("--url", required=True, help="Host MCP URL")
103
+ parser.add_argument("--token", required=True, help="Session token")
104
+ args = parser.parse_args()
105
+ asyncio.run(_run(args.url, args.token))
106
+
107
+
108
+ if __name__ == "__main__":
109
+ main()
@@ -0,0 +1,52 @@
1
+ import subprocess
2
+ import shutil
3
+ import re
4
+ import time
5
+ import threading
6
+
7
+
8
+ def start_tunnel(port: int) -> tuple[str, subprocess.Popen]:
9
+ """Start a Cloudflare tunnel pointing to the local MCP server port. Returns the HTTPS URL and the subprocess handle."""
10
+ if shutil.which("cloudflared") is None:
11
+ raise RuntimeError("cloudflared is not installed. Install it from https://developers.cloudflare.com/cloudflare-one/connections/connect-networks/downloads/")
12
+
13
+ command = ["cloudflared", "tunnel", "--url", f"http://localhost:{port}"]
14
+ process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
15
+
16
+ url_found = threading.Event()
17
+ result: list[str] = []
18
+
19
+ def _read_stderr():
20
+ deadline = time.monotonic() + 60 # 60s timeout
21
+ while time.monotonic() < deadline:
22
+ line = process.stderr.readline()
23
+ if not line:
24
+ if process.poll() is not None:
25
+ break
26
+ continue
27
+ text = line.decode("utf-8", errors="replace").strip()
28
+ match = re.search(r"https://[a-zA-Z0-9\-]+\.trycloudflare\.com", text)
29
+ if match:
30
+ result.append(match.group(0))
31
+ url_found.set()
32
+ return
33
+
34
+ reader = threading.Thread(target=_read_stderr, daemon=True)
35
+ reader.start()
36
+
37
+ if url_found.wait(timeout=60):
38
+ return result[0], process
39
+
40
+ process.terminate()
41
+ if process.poll() is None:
42
+ process.wait(timeout=5)
43
+ raise RuntimeError("Failed to start tunnel: could not find tunnel URL within 60 seconds")
44
+
45
+
46
+ def stop_tunnel(process: subprocess.Popen) -> None:
47
+ """Gracefully terminate the cloudflared subprocess."""
48
+ process.terminate()
49
+ try:
50
+ process.wait(timeout=5)
51
+ except subprocess.TimeoutExpired:
52
+ process.kill()
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
4
4
 
5
5
  [project]
6
6
  name = "letswork"
7
- version = "2.0.4"
7
+ version = "2.0.6"
8
8
  description = "Real-time collaborative coding via MCP — two developers, one codebase"
9
9
  readme = "README.md"
10
10
  license = "MIT"
@@ -1,130 +0,0 @@
1
- """
2
- LetsWork stdio MCP proxy.
3
-
4
- Claude Code connects to this as a stdio MCP server (reliable, no streaming issues).
5
- This proxy forwards all tool calls to the host's HTTP MCP server using a proper
6
- MCP client session (required by FastMCP's streamable HTTP transport).
7
-
8
- Usage (done automatically by `letswork join`):
9
- claude mcp add letswork -- letswork-proxy --url <URL> --token <TOKEN>
10
- """
11
- import sys
12
- import asyncio
13
- import argparse
14
- import logging
15
- from mcp.server import Server
16
- from mcp.server.stdio import stdio_server
17
- from mcp.client.streamable_http import streamablehttp_client
18
- from mcp import ClientSession, types
19
-
20
- log = logging.getLogger("letswork.proxy")
21
-
22
-
23
- def _setup_logging(debug: bool) -> None:
24
- level = logging.DEBUG if debug else logging.WARNING
25
- logging.basicConfig(
26
- stream=sys.stderr,
27
- level=level,
28
- format="[proxy %(levelname)s] %(message)s",
29
- )
30
-
31
-
32
- def make_proxy_server(base_url: str, token: str) -> tuple:
33
- """Create an MCP Server that forwards calls to the remote host via a proper session."""
34
- url = base_url.rstrip("/")
35
- if not url.endswith("/mcp"):
36
- url = url + "/mcp"
37
-
38
- server = Server("letswork-proxy")
39
- # Shared session state — populated once the client connects
40
- _session: ClientSession | None = None
41
-
42
- async def _get_session() -> ClientSession:
43
- nonlocal _session
44
- if _session is None:
45
- raise RuntimeError("Not connected to host")
46
- return _session
47
-
48
- @server.list_tools()
49
- async def list_tools() -> list[types.Tool]:
50
- session = await _get_session()
51
- result = await session.list_tools()
52
- tools = []
53
- for t in result.tools:
54
- schema = t.inputSchema if t.inputSchema else {"type": "object", "properties": {}}
55
- # Strip 'token' — proxy injects it automatically
56
- schema = dict(schema)
57
- props = dict(schema.get("properties", {}))
58
- props.pop("token", None)
59
- schema["properties"] = props
60
- required = [r for r in schema.get("required", []) if r != "token"]
61
- if required:
62
- schema["required"] = required
63
- elif "required" in schema:
64
- del schema["required"]
65
- tools.append(types.Tool(
66
- name=t.name,
67
- description=t.description or "",
68
- inputSchema=schema,
69
- ))
70
- return tools
71
-
72
- @server.call_tool()
73
- async def call_tool(name: str, arguments: dict) -> list[types.TextContent]:
74
- session = await _get_session()
75
- # Inject token automatically
76
- arguments = {**arguments, "token": token}
77
- log.debug(f"→ tool call: {name}({list(k for k in arguments if k != 'token')})")
78
- try:
79
- result = await session.call_tool(name, arguments)
80
- except Exception as e:
81
- log.error(f"✗ tool call {name} failed: {e}")
82
- raise
83
- out = []
84
- for item in result.content:
85
- if item.type == "text":
86
- out.append(types.TextContent(type="text", text=item.text))
87
- if not out:
88
- out.append(types.TextContent(type="text", text=str(result)))
89
- log.debug(f"← {name} OK")
90
- return out
91
-
92
- async def run(read_stream, write_stream):
93
- nonlocal _session
94
- log.debug(f"Connecting to host at {url}")
95
- try:
96
- async with streamablehttp_client(url) as (host_read, host_write, _):
97
- async with ClientSession(host_read, host_write) as session:
98
- await session.initialize()
99
- _session = session
100
- log.debug("Connected to host MCP server")
101
- await server.run(
102
- read_stream, write_stream,
103
- server.create_initialization_options(),
104
- )
105
- except Exception as e:
106
- log.error(f"Proxy connection failed: {e}")
107
- raise
108
-
109
- return server, run
110
-
111
-
112
- async def _main(url: str, token: str, debug: bool) -> None:
113
- _setup_logging(debug)
114
- log.debug(f"Starting proxy → {url}")
115
- _server, run = make_proxy_server(url, token)
116
- async with stdio_server() as (read_stream, write_stream):
117
- await run(read_stream, write_stream)
118
-
119
-
120
- def main() -> None:
121
- parser = argparse.ArgumentParser(description="LetsWork MCP stdio proxy")
122
- parser.add_argument("--url", required=True, help="Host MCP URL")
123
- parser.add_argument("--token", required=True, help="Session token")
124
- parser.add_argument("--debug", action="store_true", help="Enable debug logging")
125
- args = parser.parse_args()
126
- asyncio.run(_main(args.url, args.token, args.debug))
127
-
128
-
129
- if __name__ == "__main__":
130
- main()
@@ -1,115 +0,0 @@
1
- import asyncio
2
- import threading
3
- from mcp.client.streamable_http import streamablehttp_client
4
- from mcp import ClientSession
5
-
6
- _RECONNECT_DELAYS = [1, 2, 5, 10, 30] # seconds between retries
7
-
8
-
9
- class RemoteClient:
10
- """
11
- Connects to Host MCP server over streamable-http,
12
- exposes sync methods for TUI widgets.
13
- Auto-reconnects if the connection drops.
14
- """
15
- def __init__(self, mcp_url: str, token: str):
16
- self.mcp_url = mcp_url
17
- self.token = token
18
- self._session: ClientSession | None = None
19
- self._loop: asyncio.AbstractEventLoop | None = None
20
- self._thread: threading.Thread | None = None
21
- self._connected = False
22
- self._should_run = False
23
- self._on_reconnect: callable | None = None
24
-
25
- def on_reconnect(self, callback: callable) -> None:
26
- """Register a callback invoked after each successful reconnect."""
27
- self._on_reconnect = callback
28
-
29
- def connect(self) -> bool:
30
- self._should_run = True
31
- loop = asyncio.new_event_loop()
32
- self._loop = loop
33
- ready_event = threading.Event()
34
-
35
- async def _run_loop():
36
- attempt = 0
37
- while self._should_run:
38
- try:
39
- async with streamablehttp_client(self.mcp_url) as (read, write, _):
40
- async with ClientSession(read, write) as session:
41
- await session.initialize()
42
- self._session = session
43
- self._connected = True
44
- attempt = 0
45
- ready_event.set()
46
- if self._on_reconnect:
47
- self._on_reconnect()
48
- while self._connected and self._should_run:
49
- await asyncio.sleep(0.1)
50
- except Exception:
51
- self._connected = False
52
- self._session = None
53
- if not ready_event.is_set():
54
- # First connect failed — signal caller and stop retrying
55
- ready_event.set()
56
- return
57
- # Reconnect with backoff
58
- delay = _RECONNECT_DELAYS[min(attempt, len(_RECONNECT_DELAYS) - 1)]
59
- attempt += 1
60
- await asyncio.sleep(delay)
61
-
62
- def thread_target():
63
- loop.run_until_complete(_run_loop())
64
-
65
- self._thread = threading.Thread(target=thread_target, daemon=True)
66
- self._thread.start()
67
-
68
- ready_event.wait(timeout=10)
69
- return self._connected
70
-
71
- def disconnect(self):
72
- self._should_run = False
73
- self._connected = False
74
- if self._thread:
75
- self._thread.join(timeout=5)
76
- self._session = None
77
- self._loop = None
78
-
79
- def _run_async(self, coro) -> any:
80
- if not self._connected or self._loop is None:
81
- raise RuntimeError("Not connected")
82
- future = asyncio.run_coroutine_threadsafe(coro, self._loop)
83
- return future.result(timeout=15)
84
-
85
- def _call_tool(self, tool_name: str, arguments: dict) -> str:
86
- if not self._connected or self._session is None:
87
- return "Error: not connected"
88
- try:
89
- result = self._run_async(
90
- self._session.call_tool(tool_name, arguments)
91
- )
92
- for item in result.content:
93
- if item.type == "text":
94
- return item.text
95
- return str(result)
96
- except Exception as e:
97
- return f"Error: {e}"
98
-
99
- def list_files(self, path: str = ".") -> str:
100
- return self._call_tool("list_files", {"token": self.token, "path": path})
101
-
102
- def read_file(self, path: str) -> str:
103
- return self._call_tool("read_file", {"token": self.token, "path": path})
104
-
105
- def write_file(self, path: str, content: str) -> str:
106
- return self._call_tool("write_file", {"token": self.token, "path": path, "content": content})
107
-
108
- def lock_file(self, path: str) -> str:
109
- return self._call_tool("lock_file", {"token": self.token, "path": path})
110
-
111
- def unlock_file(self, path: str) -> str:
112
- return self._call_tool("unlock_file", {"token": self.token, "path": path})
113
-
114
- def get_status(self) -> str:
115
- return self._call_tool("get_status", {"token": self.token})
@@ -1 +0,0 @@
1
- """LetsWork TUI dashboard."""