hud-python 0.4.31__py3-none-any.whl → 0.4.32__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of hud-python might be problematic.
- hud/cli/flows/tasks.py +81 -13
- hud/cli/push.py +1 -0
- hud/cli/rl/remote_runner.py +77 -62
- hud/utils/tests/test_version.py +1 -1
- hud/version.py +1 -1
- {hud_python-0.4.31.dist-info → hud_python-0.4.32.dist-info}/METADATA +1 -1
- {hud_python-0.4.31.dist-info → hud_python-0.4.32.dist-info}/RECORD +10 -10
- {hud_python-0.4.31.dist-info → hud_python-0.4.32.dist-info}/WHEEL +0 -0
- {hud_python-0.4.31.dist-info → hud_python-0.4.32.dist-info}/entry_points.txt +0 -0
- {hud_python-0.4.31.dist-info → hud_python-0.4.32.dist-info}/licenses/LICENSE +0 -0
hud/cli/flows/tasks.py
CHANGED
@@ -27,9 +27,27 @@ def _is_remote_url(url: str) -> bool:
 
 
 def _validate_tasks(tasks: list[Task]) -> bool:
-    """Validate the tasks file.
+    """Validate the tasks file: return True if tasks already reference a remote MCP URL.
+
+    A task is considered remote if any "url" field anywhere inside mcp_config
+    is a valid remote URL (e.g., https://mcp.hud.so/v3/mcp).
+    """
+    def _has_remote_url(obj: Any) -> bool:
+        if isinstance(obj, dict):
+            for k, v in obj.items():
+                if k == "url" and isinstance(v, str) and _is_remote_url(v):
+                    return True
+                if _has_remote_url(v):
+                    return True
+        elif isinstance(obj, list):
+            for item in obj:
+                if _has_remote_url(item):
+                    return True
+        return False
+
     for task in tasks:
-
+        cfg = task.mcp_config or {}
+        if not _has_remote_url(cfg):
             return False
     return True
 
@@ -100,7 +118,7 @@ def _ensure_pushed(env_dir: Path, lock_data: dict[str, Any]) -> dict[str, Any]:
     require_docker_running()
 
     # If Docker or login is not configured, the push function will fail and halt.
-    push_environment(str(env_dir))
+    push_environment(str(env_dir), yes=True)
 
     # Reload lock after push
     lock_path = env_dir / "hud.lock.yaml"
@@ -111,7 +129,21 @@ def _ensure_pushed(env_dir: Path, lock_data: dict[str, Any]) -> dict[str, Any]:
 
 
 def _derive_remote_image(lock_data: dict[str, Any]) -> str:
-    """Derive org/name:tag from lock file
+    """Derive org/name:tag from lock file for MCP header.
+
+    Preference order:
+    1) lock_data["push"]["image_with_tag"] if present
+    2) Derive from lock_data["image"] (may be a digest; falls back to latest)
+    """
+    push_info = lock_data.get("push", {}) if isinstance(lock_data, dict) else {}
+
+    # 1) Exact image_with_tag if present
+    pushed_with_tag = str(push_info.get("image_with_tag", "")).strip()
+    if pushed_with_tag:
+        name, tag = extract_name_and_tag(pushed_with_tag)
+        return f"{name}:{tag}"
+
+    # Base name always comes from lock_data.image to preserve org/repo
     image_ref = str(lock_data.get("image", "")).strip()
     if not image_ref:
         raise typer.Exit("Lock file missing image reference")
@@ -157,19 +189,55 @@ def convert_tasks_to_remote(tasks_file: str) -> str:
     # Derive remote image name org/name:tag
     remote_image = _derive_remote_image(lock_data)
 
+    # Helper to strip extra fields from tool calls
+    def _simplify_tool_call(tool: Any) -> Any:
+        def _one(x: Any) -> dict[str, Any]:
+            try:
+                data = x.model_dump() if hasattr(x, "model_dump") else dict(x)
+            except Exception:
+                try:
+                    data = dict(x)
+                except Exception:
+                    return {}
+            # Keep only name and arguments
+            name = data.get("name")
+            arguments = data.get("arguments", {})
+            return {"name": name, "arguments": arguments}
+
+        if tool is None:
+            return None
+        if isinstance(tool, list):
+            return [_one(x) for x in tool]
+        return _one(tool)
+
     # Convert to list[dict]
     tasks_payload: list[dict[str, Any]] = []
     for t in tasks:
-        item =
-
-        "
-        "
-
-        "
-
-
-
+        item: dict[str, Any] = {
+            "prompt": t.prompt,
+            "mcp_config": {
+                "hud": {
+                    "url": "https://mcp.hud.so/v3/mcp",
+                    "headers": {
+                        "Authorization": "Bearer ${HUD_API_KEY}",
+                        "Mcp-Image": remote_image,
+                    },
+                }
+            },
         }
+
+        # Optional fields, omit Nones
+        if t.setup_tool is not None:
+            item["setup_tool"] = _simplify_tool_call(t.setup_tool)
+        if t.evaluate_tool is not None:
+            item["evaluate_tool"] = _simplify_tool_call(t.evaluate_tool)
+        if t.agent_tools is not None:
+            item["agent_tools"] = t.agent_tools
+        if t.system_prompt is not None:
+            item["system_prompt"] = t.system_prompt
+        if t.metadata:
+            item["metadata"] = t.metadata
+
         tasks_payload.append(item)
 
     # Write new file: remote_<name>.json (always JSON array)
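For orientation, a single entry in the generated remote_<name>.json comes out roughly like the sketch below. It is an illustrative Python dict matching the item built in the hunk above; the prompt, image tag, and tool names are made-up placeholders, not values from this release.

    item = {
        "prompt": "Open the settings page and enable dark mode",  # placeholder
        "mcp_config": {
            "hud": {
                "url": "https://mcp.hud.so/v3/mcp",
                "headers": {
                    "Authorization": "Bearer ${HUD_API_KEY}",
                    "Mcp-Image": "myorg/my-env:latest",  # whatever _derive_remote_image returned
                },
            }
        },
        # Optional keys appear only when the source task defines them
        "setup_tool": {"name": "setup", "arguments": {}},
        "evaluate_tool": {"name": "evaluate", "arguments": {"target": "dark_mode"}},
    }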
hud/cli/push.py
CHANGED
@@ -332,6 +332,7 @@ def push_environment(
         "source": local_image,
         "pushedAt": datetime.now(UTC).isoformat().replace("+00:00", "Z"),
         "registry": pushed_digest.split("/")[0] if "/" in pushed_digest else "docker.io",
+        "image_with_tag": image,
     }
 
     # Save updated lock file
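The new "image_with_tag" key is what _derive_remote_image (tasks.py hunks above) now prefers over the digest-based image reference. A minimal sketch with hypothetical lock contents; the exact lock layout and the return value depend on extract_name_and_tag, so treat the values as illustrative only:

    # Hypothetical lock data after a push (values invented for illustration)
    lock_data = {
        "image": "myorg/my-env@sha256:0a1b2c...",
        "push": {
            "source": "my-env:dev",
            "registry": "docker.io",
            "image_with_tag": "myorg/my-env:0.1.2",  # new field written by push_environment
        },
    }

    # With the field present, _derive_remote_image would return "myorg/my-env:0.1.2";
    # without it, the function falls back to deriving a name from lock_data["image"].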
hud/cli/rl/remote_runner.py
CHANGED
@@ -10,6 +10,7 @@ import os
 import subprocess
 import time
 from pathlib import Path
+import uuid
 
 from rich.console import Console
 
@@ -29,6 +30,43 @@ GPU_PRICING = {
 }
 
 
+def ensure_vllm_deployed(model_name: str, gpu_type: str = "A100", timeout: int = 600) -> None:
+    """Deploy vLLM for a model if needed and wait until it's ready.
+
+    Args:
+        model_name: The name of the model to deploy vLLM for
+        gpu_type: GPU type to use for deployment (e.g., A100, H100)
+        timeout: Max seconds to wait for vLLM to be ready
+    """
+    # Check current model status
+    info = rl_api.get_model(model_name)
+    if info.vllm_url:
+        hud_console.success("vLLM server already running")
+        return
+
+    hud_console.info(f"Deploying vLLM server for {model_name}...")
+    rl_api.deploy_vllm(model_name, gpu_type=gpu_type)
+    hud_console.success("vLLM deployment started")
+
+    hud_console.info("Waiting for vLLM server to be ready...")
+    start_time = time.time()
+    with hud_console.progress() as progress:
+        progress.update(
+            "Checking deployment status (see live status on https://app.hud.so/models)"
+        )
+        while True:
+            if time.time() - start_time > timeout:
+                hud_console.error("Timeout waiting for vLLM deployment")
+                raise ValueError("vLLM deployment timeout")
+            info = rl_api.get_model(model_name)
+            if info.vllm_url or info.status == "ready":
+                hud_console.success(
+                    f"vLLM server ready at http://rl.hud.so/v1/models/{model_name}/vllm"
+                )
+                break
+            time.sleep(5)
+
+
 def run_remote_training(
     tasks_file: str | None,
     model: str | None,
@@ -128,49 +166,55 @@ def run_remote_training(
            from rich.prompt import Prompt
 
            # Ask for model name
-
+           base_default = model_type.split("/")[-1].lower()
+           default_name = base_default
+           existing_names = {m.name for m in active_models}
+           suffix = 1
+           while default_name in existing_names:
+               default_name = f"{base_default}-{suffix}"
+               suffix += 1
+
            hud_console.info(f"Enter model name (default: {default_name}):")
            model_name = Prompt.ask("Model name", default=default_name)
            model_name = model_name.replace("/", "-").lower()
 
-           # Create the model
+           # Create the model with retry on name conflict
            hud_console.info(f"Creating model: {model_name}")
            try:
                rl_api.create_model(model_name, model_type)
                hud_console.success(f"Created model: {model_name}")
+               ensure_vllm_deployed(model_name, gpu_type="A100")
 
-
-
-
-
-
-
-               hud_console.info("Waiting for vLLM server to be ready...")
-               max_wait = 600  # 10 minutes
-               start_time = time.time()
-
-               with hud_console.progress() as progress:
-                   progress.update(
-                       "Checking deployment status (see live status on https://app.hud.so/models)"
-                   )
-
+           except Exception as e:
+               # If the name already exists, suggest a new name and prompt once
+               message = str(e)
+               if "already exists" in message or "409" in message:
+                   alt_name = f"{model_name}-1"
+                   i = 1
                    while True:
-
-
-
-
-                       model_info = rl_api.get_model(model_name)
-                       if model_info.status == "ready":
-                           hud_console.success(
-                               f"vLLM server ready at http://rl.hud.so/v1/models/{model_name}/vllm"
-                           )
+                       candidate = f"{model_name}-{str(uuid.uuid4())[:4]}"
+                       if candidate not in existing_names:
+                           alt_name = candidate
                            break
-
-
-
-
-
-
+                       i += 1
+                   hud_console.warning(
+                       f"Model '{model_name}' exists. Suggesting '{alt_name}' instead."
+                   )
+                   try:
+                       from rich.prompt import Prompt as _Prompt
+
+                       chosen = _Prompt.ask("Use different name", default=alt_name)
+                       chosen = chosen.replace("/", "-").lower()
+                       rl_api.create_model(chosen, model_type)
+                       hud_console.success(f"Created model: {chosen}")
+                       model_name = chosen
+                       ensure_vllm_deployed(model_name, gpu_type="A100")
+                   except Exception as e2:
+                       hud_console.error(f"Failed to create model: {e2}")
+                       raise
+               else:
+                   hud_console.error(f"Failed to create model: {e}")
+                   raise
 
        else:
            # Existing model selected
@@ -194,36 +238,7 @@ def run_remote_training(
            return
 
        # Ensure vLLM is deployed
-
-           hud_console.info(f"Deploying vLLM server for {model_name}...")
-           rl_api.deploy_vllm(model_name, gpu_type="A100")
-           hud_console.success("vLLM deployment started")
-
-           # Wait for deployment
-           hud_console.info("Waiting for vLLM server to be ready...")
-           max_wait = 600  # 10 minutes
-           start_time = time.time()
-
-           with hud_console.progress() as progress:
-               progress.update(
-                   "Checking deployment status (see live status on https://app.hud.so/models)"
-               )
-
-               while True:
-                   if time.time() - start_time > max_wait:
-                       hud_console.error("Timeout waiting for vLLM deployment")
-                       raise ValueError("vLLM deployment timeout")
-
-                   model_info = rl_api.get_model(model_name)
-                   if model_info.vllm_url:
-                       hud_console.success(
-                           f"vLLM server ready at http://rl.hud.so/v1/models/{model_name}/vllm"
-                       )
-                       break
-
-                   time.sleep(5)
-       else:
-           hud_console.success("vLLM server already running")
+       ensure_vllm_deployed(model_name, gpu_type="A100")
    except KeyboardInterrupt:
        hud_console.dim_info("Training cancelled", "")
        return
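The conflict handling added above boils down to: try the chosen name, and on an "already exists" error suggest a variant with a short uuid suffix that does not collide with any existing model name. A standalone sketch of that naming step; suggest_free_name is a hypothetical helper for illustration, not part of the module:

    import uuid

    def suggest_free_name(base: str, existing_names: set[str]) -> str:
        # Mirror the loop in run_remote_training: append a 4-character uuid
        # slice until the candidate no longer collides with an existing name.
        candidate = base
        while candidate in existing_names:
            candidate = f"{base}-{str(uuid.uuid4())[:4]}"
        return candidate

    # Example: suggest_free_name("qwen-3b", {"qwen-3b", "qwen-3b-1"}) -> "qwen-3b-7f2c" (random suffix)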
hud/utils/tests/test_version.py
CHANGED
hud/version.py
CHANGED

{hud_python-0.4.31.dist-info → hud_python-0.4.32.dist-info}/RECORD
CHANGED

@@ -2,7 +2,7 @@ hud/__init__.py,sha256=JMDFUE1pP0J1Xl_miBdt7ERvoffZmTzSFe8yxz512A8,552
 hud/__main__.py,sha256=YR8Dq8OhINOsVfQ55PmRXXg4fEK84Rt_-rMtJ5rvhWo,145
 hud/settings.py,sha256=sMS31iW1m-5VpWk-Blhi5-obLcUA0fwxWE1GgJz-vqU,2708
 hud/types.py,sha256=Cn9suZ_ZitLnxmnknfbCYVvmLsXRWI56kJ3LXtdfI6M,10157
-hud/version.py,sha256=
+hud/version.py,sha256=amJJuAW2iW2bL8JO7j8MSy3wzSXv2pKn1GXk85UJEVM,105
 hud/agents/__init__.py,sha256=UoIkljWdbq4bM0LD-mSaw6w826EqdEjOk7r6glNYwYQ,286
 hud/agents/base.py,sha256=_u1zR3gXzZ1RlTCUYdMcvgHqdJBC4-AB1lZt0yBx8lg,35406
 hud/agents/claude.py,sha256=wHiw8iAnjnRmZyKRKcOhagCDQMhz9Z6rlSBWqH1X--M,15781
@@ -30,10 +30,10 @@ hud/cli/get.py,sha256=sksKrdzBGZa7ZuSoQkc0haj-CvOGVSSikoVXeaUd3N4,6274
 hud/cli/init.py,sha256=McZwpxZMXD-It_PXINCUy-SwUaPiQ7jdpSU5-F-caO8,19671
 hud/cli/list_func.py,sha256=EVi2Vc3Lb3glBNJxFx4MPnZknZ4xmuJz1OFg_dc8a_E,7177
 hud/cli/pull.py,sha256=Vd1l1-IwskyACzhtC8Df1SYINUZEYmFxrLl0s9cNN6c,12151
-hud/cli/push.py,sha256=
+hud/cli/push.py,sha256=dmjF-hGlMfq73tquDxsTuM9t50zrkE9PFJqW5vRmYSw,18380
 hud/cli/remove.py,sha256=8vGQyXDqgtjz85_vtusoIG8zurH4RHz6z8UMevQRYM4,6861
 hud/cli/flows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-hud/cli/flows/tasks.py,sha256=
+hud/cli/flows/tasks.py,sha256=cOIXs03_iAS68nIYj5NpzqlSh1vSbIZtp3dnt8PEyl4,8855
 hud/cli/rl/__init__.py,sha256=BeqXdmzPwVBptz4j796XJRxSC5B_9tQta5aKd0jDMvo,5000
 hud/cli/rl/config.py,sha256=iNhCxotM33OEiP9gqPvn8A_AxrBVe6fcFCQTvc13xzA,2884
 hud/cli/rl/display.py,sha256=hqJVGmO9csYinladhZwjF-GMvppYWngxDHajTyIJ_gM,5214
@@ -41,7 +41,7 @@ hud/cli/rl/gpu.py,sha256=peXS-NdUF5RyuSs0aZoCzGLboneBUpCy8f9f99WMrG0,2009
 hud/cli/rl/gpu_utils.py,sha256=H5ckPwgj5EVP3yJ5eVihR5R7Y6Gp6pt8ZUfWCCwcLG4,11072
 hud/cli/rl/local_runner.py,sha256=GssmDgCxGfFsi31aFj22vwCiwa9ELllEwQjbActxSXY,21514
 hud/cli/rl/presets.py,sha256=DzOO82xL5QyzdVtlX-Do1CODMvDz9ILMPapjU92jcZg,3051
-hud/cli/rl/remote_runner.py,sha256=
+hud/cli/rl/remote_runner.py,sha256=nSLPkBek1pBQiOYg-yqkc3La5dkQEN9xn9DaauSlYAA,13747
 hud/cli/rl/rl_api.py,sha256=INJobvSa50ccR037u_GPsDa_9WboWyNwqEaoh9hcXj0,4306
 hud/cli/rl/vllm.py,sha256=Gq_M6KsQArGz7FNIdemuM5mk16mu3xe8abpO2GCCuOE,6093
 hud/cli/tests/__init__.py,sha256=ZrGVkmH7DHXGqOvjOSNGZeMYaFIRB2K8c6hwr8FPJ-8,68
@@ -198,10 +198,10 @@ hud/utils/tests/test_init.py,sha256=2QLQSGgyP9wJhOvPCusm_zjJad0qApOZi1BXpxcdHXQ,
 hud/utils/tests/test_mcp.py,sha256=0pUa16mL-bqbZDXp5NHBnt1gO5o10BOg7zTMHZ1DNPM,4023
 hud/utils/tests/test_progress.py,sha256=QSF7Kpi03Ff_l3mAeqW9qs1nhK50j9vBiSobZq7T4f4,7394
 hud/utils/tests/test_telemetry.py,sha256=5jl7bEx8C8b-FfFUko5pf4UY-mPOR-9HaeL98dGtVHM,2781
-hud/utils/tests/test_version.py,sha256=
+hud/utils/tests/test_version.py,sha256=HVazB_GTYA2lJ3Oc10a6KPOvPoBBQOGuYoJl7yIazWw,160
 hud/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-hud_python-0.4.
-hud_python-0.4.
-hud_python-0.4.
-hud_python-0.4.
-hud_python-0.4.
+hud_python-0.4.32.dist-info/METADATA,sha256=Ve-zWpi0dWcv8bjC4wCcNZnQtsrlFTOwAaWs4RmQ_W0,20861
+hud_python-0.4.32.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+hud_python-0.4.32.dist-info/entry_points.txt,sha256=jJbodNFg1m0-CDofe5AHvB4zKBq7sSdP97-ohaQ3ae4,63
+hud_python-0.4.32.dist-info/licenses/LICENSE,sha256=yIzBheVUf86FC1bztAcr7RYWWNxyd3B-UJQ3uddg1HA,1078
+hud_python-0.4.32.dist-info/RECORD,,

{hud_python-0.4.31.dist-info → hud_python-0.4.32.dist-info}/WHEEL
File without changes

{hud_python-0.4.31.dist-info → hud_python-0.4.32.dist-info}/entry_points.txt
File without changes

{hud_python-0.4.31.dist-info → hud_python-0.4.32.dist-info}/licenses/LICENSE
File without changes