mle-kit-mcp 0.2.3__py3-none-any.whl → 0.2.5__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only.
- mle_kit_mcp/llm_proxy.py +0 -10
- mle_kit_mcp/tools/llm_proxy.py +8 -20
- mle_kit_mcp/tools/text_editor.py +2 -2
- {mle_kit_mcp-0.2.3.dist-info → mle_kit_mcp-0.2.5.dist-info}/METADATA +1 -1
- {mle_kit_mcp-0.2.3.dist-info → mle_kit_mcp-0.2.5.dist-info}/RECORD +9 -9
- {mle_kit_mcp-0.2.3.dist-info → mle_kit_mcp-0.2.5.dist-info}/WHEEL +0 -0
- {mle_kit_mcp-0.2.3.dist-info → mle_kit_mcp-0.2.5.dist-info}/entry_points.txt +0 -0
- {mle_kit_mcp-0.2.3.dist-info → mle_kit_mcp-0.2.5.dist-info}/licenses/LICENSE +0 -0
- {mle_kit_mcp-0.2.3.dist-info → mle_kit_mcp-0.2.5.dist-info}/top_level.txt +0 -0
mle_kit_mcp/llm_proxy.py
CHANGED
@@ -6,7 +6,6 @@ from openai import AsyncOpenAI
 from fastapi import FastAPI, Request, HTTPException
 from fastapi.responses import JSONResponse
 
-ACCESS_TOKEN = os.getenv("ACCESS_TOKEN", "")
 OPENROUTER_API_KEY = os.getenv("OPENROUTER_API_KEY", "")
 OPENROUTER_BASE_URL = os.getenv("OPENROUTER_BASE_URL", "https://openrouter.ai/api/v1")
 
@@ -14,17 +13,8 @@ app = FastAPI()
 client = AsyncOpenAI(base_url=OPENROUTER_BASE_URL, api_key=OPENROUTER_API_KEY)
 
 
-def _check_auth(request: Request) -> None:
-    auth = request.headers.get("authorization", "")
-    if not ACCESS_TOKEN or auth != f"Bearer {ACCESS_TOKEN}":
-        raise HTTPException(status_code=401, detail="Unauthorized")
-    if not OPENROUTER_API_KEY:
-        raise HTTPException(status_code=500, detail="OpenRouter key not configured")
-
-
 @app.post("/v1/chat/completions")
 async def chat_completions(request: Request) -> JSONResponse:
-    _check_auth(request)
     payload = await request.json()
     if isinstance(payload, dict) and payload.get("stream"):
         payload.pop("stream", None)
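The server-side change removes the ACCESS_TOKEN bearer check entirely: callers no longer authenticate to the proxy, which forwards requests to OpenRouter with its own key. A minimal sketch of calling the updated endpoint with the standard OpenAI client; the port (5555) is illustrative, since the launcher now picks one at random, and the model name is just an example:

    from openai import OpenAI

    # The proxy no longer checks an Authorization header, but the OpenAI
    # client requires a non-empty key, so any placeholder works.
    client = OpenAI(base_url="http://127.0.0.1:5555/v1", api_key="unused")

    # The proxy strips "stream" from payloads, so request non-streaming.
    response = client.chat.completions.create(
        model="openai/gpt-5-mini",
        messages=[{"role": "user", "content": "Say hello."}],
    )
    print(response.choices[0].message.content)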
mle_kit_mcp/tools/llm_proxy.py
CHANGED
@@ -2,15 +2,12 @@ import os
 import json
 import time
 import random
-import secrets
 from pathlib import Path
-from typing import Optional
 
 from dotenv import load_dotenv
 
 from mle_kit_mcp.tools.bash import get_container
 from mle_kit_mcp.files import get_workspace_dir
-from mle_kit_mcp.utils import find_free_port
 
 from mle_kit_mcp.tools.remote_gpu import (
     get_instance as _remote_get_instance,
@@ -30,13 +27,12 @@ def _write_proxy_script(script_path: Path) -> None:
     script_path.write_text(script)
 
 
-def llm_proxy_local(port: Optional[int] = None) -> str:
+def llm_proxy_local() -> str:
     """
     Start a lightweight OpenRouter proxy inside the same Docker container used by the "bash" tool.
 
-    Returns a JSON string with
+    Returns a JSON string with url and scope.
     The url is reachable from inside the "bash" container as localhost.
-    Use the token in the Authorization header: "Bearer <token>" when calling the proxy.
     It runs a standard OpenAI compatible server, so you can use it with any OpenAI compatible client.
     You can use all models available on OpenRouter, for instance:
     - openai/gpt-5-mini
@@ -57,10 +53,9 @@ def llm_proxy_local(port: Optional[int] = None) -> str:
     dependencies_cmd = f"python -m pip install --quiet --no-input {DEPENDENCIES}"
     container.exec_run(["bash", "-lc", dependencies_cmd])
 
-    chosen_port =
-    token = secrets.token_urlsafe(24)
+    chosen_port = random.randint(5000, 6000)
     launch_cmd = (
-        f"OPENROUTER_API_KEY='{api_key}'
+        f"OPENROUTER_API_KEY='{api_key}' "
         f"nohup python {OUTPUT_SCRIPT_FILE_NAME} "
         f"--host 127.0.0.1 --port {chosen_port} "
         f"> llm_proxy.log 2>&1 "
@@ -81,27 +76,22 @@ def llm_proxy_local(port: Optional[int] = None) -> str:
     return json.dumps(
         {
             "url": f"http://127.0.0.1:{chosen_port}/v1/chat/completions",
-            "token": token,
             "scope": "bash-container",
         }
     )
 
 
-def llm_proxy_remote(port: Optional[int] = None) -> str:
+def llm_proxy_remote() -> str:
     """
     Start a lightweight OpenRouter proxy on the remote GPU machine.
 
-    Returns a JSON string with
+    Returns a JSON string with url and scope.
     The url is reachable from inside the remote machine as localhost.
-    Use the token in the Authorization header: "Bearer <token>" when calling the proxy.
     It runs a standard OpenAI compatible server, so you can use it with any OpenAI compatible client.
     You can use all models available on OpenRouter, for instance:
     - openai/gpt-5-mini
     - google/gemini-2.5-pro
     - anthropic/claude-sonnet-4
-
-    Args:
-        port: Optional fixed port to bind on the remote. Random if omitted.
     """
 
     load_dotenv()
@@ -113,13 +103,12 @@ def llm_proxy_remote(port: Optional[int] = None) -> str:
     _write_proxy_script(script_path)
     _remote_send_rsync(instance, f"{script_path}", "/root")
 
-    chosen_port =
-    token = secrets.token_urlsafe(24)
+    chosen_port = random.randint(5000, 6000)
     dependencies_cmd = f"python3 -m pip install -q --no-input {DEPENDENCIES}"
     _remote_run_command(instance, dependencies_cmd, timeout=300)
 
     launch_cmd = (
-        f"OPENROUTER_API_KEY='{api_key}'
+        f"OPENROUTER_API_KEY='{api_key}' "
        f"nohup python {OUTPUT_SCRIPT_FILE_NAME} "
         f"--host 127.0.0.1 --port {chosen_port} "
         f"> openrouter_proxy.log 2>&1 "
@@ -140,7 +129,6 @@ def llm_proxy_remote(port: Optional[int] = None) -> str:
     return json.dumps(
         {
             "url": f"http://127.0.0.1:{chosen_port}/v1/chat/completions",
-            "token": token,
             "scope": "remote-gpu",
         }
     )
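With the token gone, both wrappers now return only url and scope, and they bind a port chosen with random.randint(5000, 6000) instead of a caller-supplied or verified-free port (the find_free_port import is dropped), so a collision with an occupied port is possible but unlikely. A sketch of consuming the new return value; the request body is illustrative, and note that the HTTP call must run where the url resolves to the proxy (inside the "bash" container for the local variant):

    import json

    import requests

    from mle_kit_mcp.tools.llm_proxy import llm_proxy_local

    # Tool side: start the proxy and parse its JSON result.
    info = json.loads(llm_proxy_local())
    assert info["scope"] == "bash-container"

    # Container side: no Authorization header is needed anymore.
    resp = requests.post(
        info["url"],
        json={
            "model": "openai/gpt-5-mini",
            "messages": [{"role": "user", "content": "Hi"}],
        },
    )
    print(resp.json()["choices"][0]["message"]["content"])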
mle_kit_mcp/tools/text_editor.py
CHANGED
@@ -5,8 +5,8 @@ from pathlib import Path
 from mle_kit_mcp.files import get_workspace_dir
 from mle_kit_mcp.utils import truncate_content
 
-WRITE_MAX_OUTPUT_LENGTH =
-READ_MAX_OUTPUT_LENGTH =
+WRITE_MAX_OUTPUT_LENGTH = 1000
+READ_MAX_OUTPUT_LENGTH = 10000
 
 # Global state for undo operations
 FILE_HISTORY: Dict[str, List[List[str]]] = defaultdict(list)
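The editor's truncation limits are now 1000 characters for write output and 10000 for read output, with truncate_content doing the clipping. Its exact signature is not visible in this diff, so the helper below is a hypothetical stand-in that only illustrates the behavior these constants imply:

    WRITE_MAX_OUTPUT_LENGTH = 1000
    READ_MAX_OUTPUT_LENGTH = 10000

    def clip(text: str, max_length: int) -> str:
        # Stand-in for mle_kit_mcp.utils.truncate_content (assumed behavior):
        # keep the first max_length characters and mark the cut.
        if len(text) <= max_length:
            return text
        return text[:max_length] + f"\n... (truncated, {len(text)} chars total)"

    long_read = "x" * 25_000
    print(len(clip(long_read, READ_MAX_OUTPUT_LENGTH)))  # 10000 plus the marker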
{mle_kit_mcp-0.2.3.dist-info → mle_kit_mcp-0.2.5.dist-info}/RECORD
CHANGED
@@ -1,18 +1,18 @@
 mle_kit_mcp/__init__.py,sha256=2Ru2I5u4cE7DrkkAsibDUEF1K6sYtqppb9VyFrRoQKI,94
 mle_kit_mcp/__main__.py,sha256=rcmsOtJd3SA82exjrcGBuxuptcoxF8AXI7jNjiVq2BY,59
 mle_kit_mcp/files.py,sha256=ux53kWw7hBAcOmS9qNI4gpQX8XcQPT2LICC--S5-TGI,635
-mle_kit_mcp/llm_proxy.py,sha256=
+mle_kit_mcp/llm_proxy.py,sha256=01BG6OA8husOQXxgJQ7RnTNEE_1HDczlCNoAVnYWURQ,1225
 mle_kit_mcp/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 mle_kit_mcp/server.py,sha256=R59DC7HVdpPcGHVXghPGUhrRKfn9Uqmy2MPkaYNfyzw,1320
 mle_kit_mcp/utils.py,sha256=iHNcEZZzPD37bEYE18SzJ3WUjLP3Ym-kc91SwcW1vlI,1984
 mle_kit_mcp/tools/__init__.py,sha256=r2fIg2mZ6zaeq0CzEKCEdeUTjV0pcA9NZaaOfBNVTnE,332
 mle_kit_mcp/tools/bash.py,sha256=kunYHc3dyPGOooT-KY9L7eI_N22lBrcDbTlcp_yTTws,2820
-mle_kit_mcp/tools/llm_proxy.py,sha256=
+mle_kit_mcp/tools/llm_proxy.py,sha256=uEPZETqJWGOwLESWfvIjJidL6LSRry5j-qq6rpjtxLM,4623
 mle_kit_mcp/tools/remote_gpu.py,sha256=2Wjp6fYTGX9i6bZUhSWM5WMKh0eMc73wCdezes04bDg,12546
-mle_kit_mcp/tools/text_editor.py,sha256=
-mle_kit_mcp-0.2.
-mle_kit_mcp-0.2.
-mle_kit_mcp-0.2.
-mle_kit_mcp-0.2.
-mle_kit_mcp-0.2.
-mle_kit_mcp-0.2.
+mle_kit_mcp/tools/text_editor.py,sha256=hkobiyYB5um6bs5sWYDQ1S2Y5n31i7I6fOBUDFkNhmM,9531
+mle_kit_mcp-0.2.5.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+mle_kit_mcp-0.2.5.dist-info/METADATA,sha256=pKWNkp9pLxtGAofpzamJA4BuTkzxwf0hqq8CNl1s0ts,1074
+mle_kit_mcp-0.2.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+mle_kit_mcp-0.2.5.dist-info/entry_points.txt,sha256=-iHSUVPN49jkBj1ySpc-P0rVF5-IPHw-KWNayNIiEsk,49
+mle_kit_mcp-0.2.5.dist-info/top_level.txt,sha256=XeBtCq_CnVI0gh0Z_daZOLmGl5XPlkA8RgHaj5s5VQY,12
+mle_kit_mcp-0.2.5.dist-info/RECORD,,
{mle_kit_mcp-0.2.3.dist-info → mle_kit_mcp-0.2.5.dist-info}/WHEEL
File without changes
{mle_kit_mcp-0.2.3.dist-info → mle_kit_mcp-0.2.5.dist-info}/entry_points.txt
File without changes
{mle_kit_mcp-0.2.3.dist-info → mle_kit_mcp-0.2.5.dist-info}/licenses/LICENSE
File without changes
{mle_kit_mcp-0.2.3.dist-info → mle_kit_mcp-0.2.5.dist-info}/top_level.txt
File without changes