chutes 0.5.3rc2-py3-none-any.whl → 0.5.3rc4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- chutes/_version.py +1 -1
- chutes/entrypoint/run.py +32 -0
- chutes/util/hf.py +229 -37
- {chutes-0.5.3rc2.dist-info → chutes-0.5.3rc4.dist-info}/METADATA +2 -2
- {chutes-0.5.3rc2.dist-info → chutes-0.5.3rc4.dist-info}/RECORD +9 -9
- {chutes-0.5.3rc2.dist-info → chutes-0.5.3rc4.dist-info}/LICENSE +0 -0
- {chutes-0.5.3rc2.dist-info → chutes-0.5.3rc4.dist-info}/WHEEL +0 -0
- {chutes-0.5.3rc2.dist-info → chutes-0.5.3rc4.dist-info}/entry_points.txt +0 -0
- {chutes-0.5.3rc2.dist-info → chutes-0.5.3rc4.dist-info}/top_level.txt +0 -0
chutes/_version.py
CHANGED
@@ -1 +1 @@
-version = "0.5.3.rc2"
+version = "0.5.3.rc4"
chutes/entrypoint/run.py
CHANGED
@@ -34,6 +34,7 @@ from fastapi import Request, Response, status, HTTPException
from fastapi.responses import ORJSONResponse
from starlette.middleware.base import BaseHTTPMiddleware
from chutes.entrypoint.verify import GpuVerifier
+from chutes.util.hf import verify_cache, CacheVerificationError
from prometheus_client import generate_latest, CONTENT_TYPE_LATEST
from substrateinterface import Keypair, KeypairType
from chutes.entrypoint._shared import (
@@ -1411,6 +1412,37 @@ def run_chute(
    chute.add_api_route("/_toca", envdump.handle_toca, methods=["POST"])
    chute.add_api_route("/_eslurp", envdump.handle_slurp, methods=["POST"])

+    async def _handle_hf_check(request: Request):
+        """
+        Verify HuggingFace cache integrity.
+        """
+        data = request.state.decrypted
+        repo_id = data.get("repo_id")
+        revision = data.get("revision")
+        full_hash_check = data.get("full_hash_check", False)
+
+        if not repo_id or not revision:
+            return {
+                "error": True,
+                "reason": "bad_request",
+                "message": "repo_id and revision are required",
+                "repo_id": repo_id,
+                "revision": revision,
+            }
+
+        try:
+            result = await verify_cache(
+                repo_id=repo_id,
+                revision=revision,
+                full_hash_check=full_hash_check,
+            )
+            result["error"] = False
+            return result
+        except CacheVerificationError as e:
+            return e.to_dict()
+
+    chute.add_api_route("/_hf_check", _handle_hf_check, methods=["POST"])
+
    logger.success("Added all chutes internal endpoints.")

    # Job shutdown/kill endpoint.
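Note: the new /_hf_check route depends on chutes' payload-decryption middleware populating request.state.decrypted before the handler runs. The sketch below shows the same request/response flow on a bare FastAPI app; the stub middleware that copies the JSON body into request.state.decrypted is an assumption standing in for the real decryption layer, and is only meant to illustrate the payload and response shapes.

# Sketch only: bare FastAPI app with a stub middleware (assumption) standing in for
# chutes' decryption middleware, wired to the same verify_cache() helper.
from fastapi import FastAPI, Request
from starlette.middleware.base import BaseHTTPMiddleware
from chutes.util.hf import verify_cache, CacheVerificationError

app = FastAPI()


class FakeDecryptMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request, call_next):
        # Stand-in: the real middleware decrypts the body; here we just parse JSON.
        request.state.decrypted = await request.json()
        return await call_next(request)


app.add_middleware(FakeDecryptMiddleware)


@app.post("/_hf_check")
async def hf_check(request: Request):
    data = request.state.decrypted
    try:
        result = await verify_cache(
            repo_id=data["repo_id"],
            revision=data["revision"],
            full_hash_check=data.get("full_hash_check", False),
        )
        result["error"] = False
        return result
    except CacheVerificationError as e:
        # Structured failure payload: reason, mismatches, missing, extra, errors.
        return e.to_dict()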
chutes/util/hf.py
CHANGED
@@ -6,8 +6,10 @@ import os
import shutil
import asyncio
import aiohttp
+import hashlib
from pathlib import Path
from loguru import logger
+from concurrent.futures import ThreadPoolExecutor

PROXY_URL = "https://api.chutes.ai/misc/hf_repo_info"

@@ -31,7 +33,38 @@ def purge_model_cache(repo_id: str, cache_dir: str = "/cache") -> bool:
class CacheVerificationError(Exception):
    """Raised when cache verification fails."""

-
+    def __init__(
+        self,
+        message: str,
+        reason: str = "verification_failed",
+        repo_id: str | None = None,
+        revision: str | None = None,
+        mismatches: list[str] | None = None,
+        missing: list[str] | None = None,
+        extra: list[str] | None = None,
+        errors: list[str] | None = None,
+    ):
+        super().__init__(message)
+        self.reason = reason
+        self.repo_id = repo_id
+        self.revision = revision
+        self.mismatches = mismatches or []
+        self.missing = missing or []
+        self.extra = extra or []
+        self.errors = errors or []
+
+    def to_dict(self) -> dict:
+        return {
+            "error": True,
+            "reason": self.reason,
+            "message": str(self),
+            "repo_id": self.repo_id,
+            "revision": self.revision,
+            "mismatches": self.mismatches,
+            "missing": self.missing,
+            "extra": self.extra,
+            "errors": self.errors,
+        }


def _get_hf_token() -> str | None:
@@ -40,23 +73,59 @@ def _get_hf_token() -> str | None:

def _get_symlink_hash(file_path: Path) -> str | None:
    """
-    Extract
+    Extract hash from symlink target (blob filename).
    """
    if file_path.is_symlink():
        target = os.readlink(file_path)
        blob_name = Path(target).name
-
+        # 64 chars = SHA256 (LFS), 40 chars = SHA1 (git blob)
+        if len(blob_name) in (40, 64):
            return blob_name
    return None


+def git_blob_hash(filepath: Path, chunk_size: int = 8 * 1024 * 1024) -> str:
+    """
+    Compute git blob SHA-1 for a file using streaming (memory efficient).
+    Git blob format: "blob {size}\0{content}"
+    """
+    size = filepath.stat().st_size
+    sha1 = hashlib.sha1()
+    sha1.update(f"blob {size}\0".encode())
+    with open(filepath, "rb") as f:
+        while chunk := f.read(chunk_size):
+            sha1.update(chunk)
+    return sha1.hexdigest()
+
+
+def compute_sha256(filepath: Path, chunk_size: int = 8 * 1024 * 1024) -> str:
+    """
+    Compute SHA256 hash of a file using streaming (memory efficient).
+    """
+    sha256 = hashlib.sha256()
+    with open(filepath, "rb") as f:
+        while chunk := f.read(chunk_size):
+            sha256.update(chunk)
+    return sha256.hexdigest()
+
+
async def verify_cache(
    repo_id: str,
    revision: str,
    cache_dir: str = "/cache",
+    full_hash_check: bool = False,
+    max_workers: int = 4,
) -> dict:
    """
    Verify cached HuggingFace model files match checksums on the Hub.
+
+    Args:
+        repo_id: HuggingFace repository ID
+        revision: Git revision (commit hash, branch, or tag)
+        cache_dir: Cache directory path
+        full_hash_check: If True, compute full file hashes instead of just
+            checking symlink names. Slower but more thorough.
+        max_workers: Number of parallel workers for hash computation
    """
    cache_dir = Path(cache_dir)
    params = {
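Note: git_blob_hash() above reproduces git's object hashing for regular (non-LFS) files: SHA-1 over the header "blob {size}\0" followed by the raw contents, the same value `git hash-object <file>` prints. A minimal sketch of the scheme on an in-memory byte string (illustration only; the helper name is hypothetical):

# Sketch: git blob hashing over bytes; the empty blob hashes to git's well-known id.
import hashlib


def git_blob_hash_of_bytes(data: bytes) -> str:
    sha1 = hashlib.sha1()
    sha1.update(f"blob {len(data)}\0".encode())  # git object header
    sha1.update(data)
    return sha1.hexdigest()


assert git_blob_hash_of_bytes(b"") == "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"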
@@ -73,6 +142,22 @@ async def verify_cache(
            async with session.get(
                PROXY_URL, params=params, timeout=aiohttp.ClientTimeout(total=30)
            ) as resp:
+                if resp.status == 404:
+                    text = await resp.text()
+                    raise CacheVerificationError(
+                        f"Repository or revision not found: {repo_id}@{revision} - {text}",
+                        reason="not_found",
+                        repo_id=repo_id,
+                        revision=revision,
+                    )
+                if resp.status in (401, 403):
+                    text = await resp.text()
+                    raise CacheVerificationError(
+                        f"Access denied to {repo_id}: {text}",
+                        reason="access_denied",
+                        repo_id=repo_id,
+                        revision=revision,
+                    )
                if resp.status != 200:
                    text = await resp.text()
                    logger.warning(
@@ -80,34 +165,53 @@ async def verify_cache(
                    )
                    return {"verified": 0, "skipped": 0, "total": 0, "skipped_api_error": True}
                repo_info = await resp.json()
+    except CacheVerificationError:
+        raise
    except (aiohttp.ClientError, asyncio.TimeoutError) as e:
        logger.warning(f"Cache verification skipped - proxy request failed: {e}")
        return {"verified": 0, "skipped": 0, "total": 0, "skipped_api_error": True}

-    # Build remote files dict: {path: (
+    # Build remote files dict: {path: (hash, size, is_lfs)}
    remote_files = {}
    for item in repo_info["files"]:
-
-
-
-            remote_files[item["path"]] = (item.get("sha256"), item.get("size"))
+        is_lfs = item.get("is_lfs", False)
+        if is_lfs:
+            remote_files[item["path"]] = (item.get("sha256"), item.get("size"), True)
        else:
-            remote_files[item["path"]] = (item.get("blob_id"), item.get("size"))
+            remote_files[item["path"]] = (item.get("blob_id"), item.get("size"), False)
+
+    # Directories.
+    directories = repo_info.get("directories")
+    if directories is not None:
+        for dir_path in directories:
+            if dir_path not in remote_files:
+                remote_files[dir_path] = (None, None, False)
+    else:
+        for item in repo_info["files"]:
+            parts = item["path"].split("/")
+            for i in range(1, len(parts)):
+                dir_path = "/".join(parts[:i])
+                if dir_path not in remote_files:
+                    remote_files[dir_path] = (None, None, False)

    # Find local cache
    repo_folder_name = f"models--{repo_id.replace('/', '--')}"
    snapshot_dir = cache_dir / "hub" / repo_folder_name / "snapshots" / revision

    if not snapshot_dir.exists():
-        raise CacheVerificationError(
-
-
+        raise CacheVerificationError(
+            f"Cache directory not found: {snapshot_dir}",
+            reason="cache_not_found",
+            repo_id=repo_id,
+            revision=revision,
+        )
+
+    # Get local files and directories
    local_files = {}
    for path in snapshot_dir.rglob("*"):
-        if path.is_file() or path.is_symlink():
+        if path.is_file() or path.is_symlink() or path.is_dir():
            rel_path = str(path.relative_to(snapshot_dir))
-
-            local_files[rel_path] = path
+            local_files[rel_path] = path

    verified = 0
    skipped = 0
@@ -115,21 +219,23 @@ async def verify_cache(
    missing = []
    errors = []

-
+    # Files needing hash computation: (remote_path, resolved_path, expected_hash, hash_type)
+    files_to_hash = []
+
+    for remote_path, (remote_hash, remote_size, is_lfs) in remote_files.items():
        local_path = local_files.get(remote_path)

        if not local_path or (not local_path.exists() and not local_path.is_symlink()):
            missing.append(remote_path)
            continue

-
-        if remote_hash is None or len(remote_hash) == 40:
+        if remote_hash is None:
            skipped += 1
            continue

        resolved_path = local_path.resolve()

-        # Check size
+        # Check size first (quick sanity check)
        if remote_size is not None:
            try:
                actual_size = resolved_path.stat().st_size
@@ -142,21 +248,76 @@ async def verify_cache(
                errors.append(f"{remote_path}: cannot stat: {e}")
                continue

-
-
-
-
-
-
+        if is_lfs:
+            if full_hash_check:
+                # Queue for full SHA256 computation
+                files_to_hash.append((remote_path, resolved_path, remote_hash, "sha256"))
+            else:
+                # Fast check via symlink name
+                symlink_hash = _get_symlink_hash(local_path)
+                if symlink_hash:
+                    if symlink_hash != remote_hash:
+                        mismatches.append(
+                            f"{remote_path}: hash {symlink_hash} != expected {remote_hash}"
+                        )
+                    else:
+                        verified += 1
+                else:
+                    errors.append(f"{remote_path}: LFS file not a symlink, cannot fast-verify")
        else:
-            #
-
-
-
-
-
-
+            # Non-LFS file: verify via git blob hash
+            if full_hash_check:
+                # Queue for git blob hash computation
+                files_to_hash.append((remote_path, resolved_path, remote_hash, "git_blob"))
+            else:
+                # Fast check via symlink name (if available)
+                symlink_hash = _get_symlink_hash(local_path)
+                if symlink_hash:
+                    if symlink_hash != remote_hash:
+                        mismatches.append(
+                            f"{remote_path}: hash {symlink_hash} != expected {remote_hash}"
+                        )
+                    else:
+                        verified += 1
+                else:
+                    # Not a symlink, must compute hash
+                    files_to_hash.append((remote_path, resolved_path, remote_hash, "git_blob"))
+
+    # Compute hashes in parallel using thread pool
+    if files_to_hash:
+        loop = asyncio.get_running_loop()
+        with ThreadPoolExecutor(max_workers=max_workers) as executor:
+
+            def compute_hash_sync(item):
+                remote_path, resolved_path, expected_hash, hash_type = item
+                try:
+                    if hash_type == "sha256":
+                        computed = compute_sha256(resolved_path)
+                    else:  # git_blob
+                        computed = git_blob_hash(resolved_path)
+                    return (remote_path, computed, expected_hash, None)
+                except Exception as e:
+                    return (remote_path, None, expected_hash, str(e))
+
+            futures = [
+                loop.run_in_executor(executor, compute_hash_sync, item) for item in files_to_hash
+            ]
+            results = await asyncio.gather(*futures)
+
+        for remote_path, computed, expected, error in results:
+            if error:
+                errors.append(f"{remote_path}: hash computation failed: {error}")
+            elif computed != expected:
+                mismatches.append(f"{remote_path}: hash {computed} != expected {expected}")
+            else:
+                verified += 1
+
+    # Check for extra local files (ignore _ prefixed paths not in remote)
+    extra = [
+        p
+        for p in local_files
+        if p not in remote_files and not any(part.startswith("_") for part in Path(p).parts)
+    ]

    # Build error message if needed
    if mismatches or missing or extra or errors:
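Note: the hash computations queued above are CPU- and IO-bound, so they are offloaded to a ThreadPoolExecutor via loop.run_in_executor and gathered back on the event loop. A generic sketch of that pattern, stripped of the verification details (names here are placeholders):

# Sketch: fan CPU-bound hashing out to threads from async code, then gather results.
import asyncio
import hashlib
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path


def sha256_of(path: Path) -> str:
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(8 * 1024 * 1024):
            digest.update(chunk)
    return digest.hexdigest()


async def hash_all(paths: list[Path], max_workers: int = 4) -> dict[Path, str]:
    loop = asyncio.get_running_loop()
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = [loop.run_in_executor(executor, sha256_of, p) for p in paths]
        digests = await asyncio.gather(*futures)
    return dict(zip(paths, digests))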
@@ -169,7 +330,16 @@ async def verify_cache(
            msg_parts.append(f"Extra ({len(extra)}): " + ", ".join(extra))
        if errors:
            msg_parts.append(f"Errors ({len(errors)}): " + "; ".join(errors))
-        raise CacheVerificationError(
+        raise CacheVerificationError(
+            "\n".join(msg_parts),
+            reason="integrity_mismatch",
+            repo_id=repo_id,
+            revision=revision,
+            mismatches=mismatches,
+            missing=missing,
+            extra=extra,
+            errors=errors,
+        )

    logger.success(f"Successfully verified HF cache for {repo_id=} {revision=}")

@@ -194,17 +364,39 @@ if __name__ == "__main__":
        "--revision", required=True, help="Git revision (commit hash, branch, or tag)"
    )
    parser.add_argument(
-        "--cache-dir",
+        "--cache-dir",
+        default=str(Path(HF_HUB_CACHE).parent),
+        help="Cache directory (default: ~/.cache/huggingface)",
+    )
+    parser.add_argument(
+        "--full-hash-check",
+        action="store_true",
+        help="Compute full file hashes (slower but verifies actual content integrity)",
+    )
+    parser.add_argument(
+        "--max-workers",
+        type=int,
+        default=4,
+        help="Number of parallel workers for hash computation (default: 4)",
    )
    args = parser.parse_args()

    try:
-        result = asyncio.run(
+        result = asyncio.run(
+            verify_cache(
+                args.repo_id,
+                args.revision,
+                args.cache_dir,
+                full_hash_check=args.full_hash_check,
+                max_workers=args.max_workers,
+            )
+        )
        if result["skipped_api_error"]:
            print("⚠️ Verification skipped (API unavailable)")
        else:
            print(
-                f"✅ Verified {result['verified']}/{result['total']} files
+                f"✅ Verified {result['verified']}/{result['total']} files "
+                f"(skipped {result['skipped']} without hash)"
            )
    except CacheVerificationError as e:
        print(f"❌ {e}")
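Note: besides the CLI entry point above, verify_cache() can be awaited directly with the new keyword arguments. A minimal sketch, assuming a placeholder repo_id/revision and the default /cache layout:

# Sketch: calling verify_cache() directly; repo_id and revision are placeholders.
import asyncio
from chutes.util.hf import verify_cache, CacheVerificationError


async def check():
    try:
        result = await verify_cache(
            repo_id="org/model",          # hypothetical repository
            revision="0123456789abcdef",  # hypothetical commit hash
            cache_dir="/cache",
            full_hash_check=True,         # hash every file instead of trusting symlink names
            max_workers=8,
        )
        print(result)  # counts such as "verified", "skipped", "total"
    except CacheVerificationError as e:
        print(e.to_dict())  # reason, mismatches, missing, extra, errors


asyncio.run(check())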
{chutes-0.5.3rc2.dist-info → chutes-0.5.3rc4.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: chutes
-Version: 0.5.3rc2
+Version: 0.5.3rc4
Summary: Chutes development kit and CLI.
Home-page: https://github.com/rayonlabs/chutes
Author: Jon Durbin

@@ -34,7 +34,7 @@ Requires-Dist: aiofiles>=23
Requires-Dist: semver
Requires-Dist: huggingface-hub
Requires-Dist: setproctitle
-Requires-Dist: cllmv==0.1.
+Requires-Dist: cllmv==0.1.1
Provides-Extra: dev
Requires-Dist: black; extra == "dev"
Requires-Dist: flake8; extra == "dev"
{chutes-0.5.3rc2.dist-info → chutes-0.5.3rc4.dist-info}/RECORD
CHANGED

@@ -1,5 +1,5 @@
chutes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-chutes/_version.py,sha256=
+chutes/_version.py,sha256=cKnghV_3APd96pp_jy-68hcCFvmouXrmvLbwmMnTTB8,22
chutes/cfsv,sha256=vkR_q3qB0hkD8Knv8rqqrb4A29y1muHW2-eNYZiuI-g,892932
chutes/cfsv_v2,sha256=fa8-WFqaNeD-BXcCayUjCvR0NNizcc6Ewd6vNmFjPc0,893296
chutes/cfsv_v3,sha256=aIe3rnM3-g416Ry7eMyDXDOJkLZPxW_EjKJDwh7BMGc,1198496

@@ -37,7 +37,7 @@ chutes/entrypoint/logger.py,sha256=MGfKmxni_4daAwFWb7JcSBDXTpmBJE8guACz2uYY6fE,9
chutes/entrypoint/login.py,sha256=9d_MzS3ZQ884d4NAc8JGwvyNR6SaUazrpseOTAIhWNU,6076
chutes/entrypoint/register.py,sha256=jHwZphEZCLpF4BRXT7iaieBZOvmpByCz25UQd804Kp0,9080
chutes/entrypoint/report.py,sha256=-AeUFF8DFoaL14C8lAfVqd97aDH7fLDCzxZo-COYINg,1949
-chutes/entrypoint/run.py,sha256=
+chutes/entrypoint/run.py,sha256=yf1_3VHW0Xka8kHB5-dqVY0XoltarniFHfdjYLsRDx8,60611
chutes/entrypoint/secret.py,sha256=_mtBXm_YL-nDg9XWmg2S4gGU_OdNBub_zb6jpLV7458,1581
chutes/entrypoint/share.py,sha256=_1yIdIMwK5khxNZ8WApTRXx1htMi_dg1_VV7j1IVUjs,1532
chutes/entrypoint/ssh.py,sha256=ryBRL_-bREyYpV_cZAhhZBFyIgvdWsNIIRwnM_sX41g,2285

@@ -70,12 +70,12 @@ chutes/pyarmor_runtime_006563/py313/pyarmor_runtime.so,sha256=1Fm2wCKz5770HEKbkv
chutes/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
chutes/util/auth.py,sha256=V9BShLNym6bPmp44d0FUg79e8N9oe446UqwoYqo5sPk,2238
chutes/util/context.py,sha256=_dHVEM6S6CYwgaxrA0yZCplSDA2HuiyNOmedSsyiDWA,355
-chutes/util/hf.py,sha256=
+chutes/util/hf.py,sha256=C76T-k3uYTnCJ1I3jgJf62Bkv66ltZYLN-fnIaP-zXE,14260
chutes/util/schema.py,sha256=b0NX-hWhc0FKnWD6FObBmQxINrJDFNocKsbGJn79D-8,6699
chutes/util/user.py,sha256=WBx6vyw0P32wJ2GFYMjO5oQJPA3jn_XsyRglNJx0PhY,423
-chutes-0.5.
-chutes-0.5.
-chutes-0.5.
-chutes-0.5.
-chutes-0.5.
-chutes-0.5.
+chutes-0.5.3rc4.dist-info/LICENSE,sha256=9qFhoY0O1XdKOczAAc7vcveZzk32-a0Wq2_diH_hAD8,1067
+chutes-0.5.3rc4.dist-info/METADATA,sha256=gIMUE4xU0F12_XS5W_Ra6nqsQxBK_RlhoAAodyZhkX8,18789
+chutes-0.5.3rc4.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+chutes-0.5.3rc4.dist-info/entry_points.txt,sha256=93PU_dKxEGA1hCGaabvRXO9WpyFgon6eqT4wIN91YAU,42
+chutes-0.5.3rc4.dist-info/top_level.txt,sha256=oRxU-Kvd5BhaNbBQtqZCp9uzY0FrMBocoL0Q6kokxzA,7
+chutes-0.5.3rc4.dist-info/RECORD,,
{chutes-0.5.3rc2.dist-info → chutes-0.5.3rc4.dist-info}/LICENSE
File without changes

{chutes-0.5.3rc2.dist-info → chutes-0.5.3rc4.dist-info}/WHEEL
File without changes

{chutes-0.5.3rc2.dist-info → chutes-0.5.3rc4.dist-info}/entry_points.txt
File without changes

{chutes-0.5.3rc2.dist-info → chutes-0.5.3rc4.dist-info}/top_level.txt
File without changes