contree-mcp 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- contree_mcp/__init__.py +0 -0
- contree_mcp/__main__.py +25 -0
- contree_mcp/app.py +240 -0
- contree_mcp/arguments.py +35 -0
- contree_mcp/auth/__init__.py +2 -0
- contree_mcp/auth/registry.py +236 -0
- contree_mcp/backend_types.py +301 -0
- contree_mcp/cache.py +208 -0
- contree_mcp/client.py +711 -0
- contree_mcp/context.py +53 -0
- contree_mcp/docs.py +1203 -0
- contree_mcp/file_cache.py +381 -0
- contree_mcp/prompts.py +238 -0
- contree_mcp/py.typed +0 -0
- contree_mcp/resources/__init__.py +17 -0
- contree_mcp/resources/guide.py +715 -0
- contree_mcp/resources/image_lineage.py +46 -0
- contree_mcp/resources/image_ls.py +32 -0
- contree_mcp/resources/import_operation.py +52 -0
- contree_mcp/resources/instance_operation.py +52 -0
- contree_mcp/resources/read_file.py +33 -0
- contree_mcp/resources/static.py +12 -0
- contree_mcp/server.py +77 -0
- contree_mcp/tools/__init__.py +39 -0
- contree_mcp/tools/cancel_operation.py +36 -0
- contree_mcp/tools/download.py +128 -0
- contree_mcp/tools/get_guide.py +54 -0
- contree_mcp/tools/get_image.py +30 -0
- contree_mcp/tools/get_operation.py +26 -0
- contree_mcp/tools/import_image.py +99 -0
- contree_mcp/tools/list_files.py +80 -0
- contree_mcp/tools/list_images.py +50 -0
- contree_mcp/tools/list_operations.py +46 -0
- contree_mcp/tools/read_file.py +47 -0
- contree_mcp/tools/registry_auth.py +71 -0
- contree_mcp/tools/registry_token_obtain.py +80 -0
- contree_mcp/tools/rsync.py +46 -0
- contree_mcp/tools/run.py +97 -0
- contree_mcp/tools/set_tag.py +31 -0
- contree_mcp/tools/upload.py +50 -0
- contree_mcp/tools/wait_operations.py +79 -0
- contree_mcp-0.1.0.dist-info/METADATA +450 -0
- contree_mcp-0.1.0.dist-info/RECORD +46 -0
- contree_mcp-0.1.0.dist-info/WHEEL +4 -0
- contree_mcp-0.1.0.dist-info/entry_points.txt +2 -0
- contree_mcp-0.1.0.dist-info/licenses/LICENSE +176 -0
contree_mcp/client.py
ADDED
|
@@ -0,0 +1,711 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import base64
|
|
3
|
+
import hashlib
|
|
4
|
+
import importlib.metadata
|
|
5
|
+
import json
|
|
6
|
+
import logging
|
|
7
|
+
import platform
|
|
8
|
+
import sys
|
|
9
|
+
from collections.abc import AsyncIterator, Mapping
|
|
10
|
+
from contextlib import asynccontextmanager
|
|
11
|
+
from functools import cached_property
|
|
12
|
+
from io import BytesIO
|
|
13
|
+
from types import MappingProxyType
|
|
14
|
+
from typing import IO, Any, Generic, Literal, TypeVar
|
|
15
|
+
from urllib.parse import unquote
|
|
16
|
+
from uuid import UUID
|
|
17
|
+
|
|
18
|
+
import httpx
|
|
19
|
+
from httpx import Headers
|
|
20
|
+
from pydantic import BaseModel, ByteSize
|
|
21
|
+
from typing_extensions import Self
|
|
22
|
+
|
|
23
|
+
from .backend_types import (
|
|
24
|
+
DirectoryList,
|
|
25
|
+
FileResponse,
|
|
26
|
+
Image,
|
|
27
|
+
ImageCredentials,
|
|
28
|
+
ImageListResponse,
|
|
29
|
+
ImageRegistry,
|
|
30
|
+
ImportImageMetadata,
|
|
31
|
+
InstanceFileSpec,
|
|
32
|
+
InstanceMetadata,
|
|
33
|
+
InstanceSpawnResponse,
|
|
34
|
+
OperationKind,
|
|
35
|
+
OperationListResponse,
|
|
36
|
+
OperationResponse,
|
|
37
|
+
OperationResult,
|
|
38
|
+
OperationStatus,
|
|
39
|
+
OperationSummary,
|
|
40
|
+
Stream,
|
|
41
|
+
)
|
|
42
|
+
from .cache import Cache
|
|
43
|
+
|
|
44
|
+
# Type variable for the pydantic model a structured response is parsed into.
ModelT = TypeVar("ModelT", bound=BaseModel)

# Kind of long-running backend operation a client-side polling task tracks.
OperationTrackingKind = Literal["instance", "image_import"]

log = logging.getLogger(__name__)
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class StreamResponse:
    """A raw streaming HTTP response: status code, headers, and a lazy body iterator."""

    __slots__ = ("status", "headers", "body_iter")

    status: int
    headers: Headers
    body_iter: AsyncIterator[bytes]

    def __init__(self, status: int, headers: Headers, body_iter: AsyncIterator[bytes]):
        self.status = status
        self.headers = headers
        self.body_iter = body_iter

    async def __aiter__(self) -> AsyncIterator[bytes]:
        """Yield body chunks by delegating to the underlying iterator."""
        async for chunk in self.body_iter:
            yield chunk
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
class StructuredResponse(Generic[ModelT]):
    """An HTTP response whose JSON body has been parsed into a pydantic model."""

    __slots__ = ("status", "headers", "body")

    headers: Headers
    status: int
    body: ModelT

    def __init__(self, status: int, headers: Headers, body: ModelT):
        self.status = status
        self.headers = headers
        self.body = body

    @classmethod
    async def from_stream(
        cls,
        stream_response: StreamResponse,
        model: type[ModelT],
        payload_limit: int = 64 * 1024,
    ) -> "StructuredResponse[ModelT]":
        """Drain *stream_response* into memory and validate its JSON body as *model*.

        Raises:
            ContreeError: If the declared Content-Length exceeds *payload_limit*,
                or the body is not valid JSON, or validation against *model* fails.
        """
        # Content-Length may be absent; -1 skips the size guard in that case.
        content_length = int(stream_response.headers.get("Content-Length", "-1"))
        if content_length > payload_limit:
            raise ContreeError(f"Response too large ({content_length} bytes) for streaming response")
        with BytesIO() as stream:
            async for chunk in stream_response:
                stream.write(chunk)
            text = stream.getvalue().decode("utf-8").strip()
        try:
            body = model.model_validate(json.loads(text))
        except ValueError as e:
            raise ContreeError(f"Streaming response: invalid JSON: {e}") from e
        except Exception as e:
            # pydantic validation errors and anything else unexpected.
            raise ContreeError(f"Streaming response: failed to parse response: {e}") from e
        return cls(stream_response.status, stream_response.headers, body)
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
class ContreeError(Exception):
|
|
104
|
+
def __init__(self, message: str, status_code: int | None = None):
|
|
105
|
+
super().__init__(message)
|
|
106
|
+
self.message = message
|
|
107
|
+
self.status_code = status_code
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
class ContreeClient:
    """Async HTTP client for the Contree backend API (images, files, instances, operations)."""

    PYTHON_VERSION = f"{'.'.join(map(str, sys.version_info))}"
    # Distribution version of this package; "unknown" when not installed.
    try:
        LIBRARY_VERSION = importlib.metadata.version("contree-mcp")
    except Exception:
        LIBRARY_VERSION = "unknown"

    OS_NAME = platform.system()
    OS_VERSION = platform.release()
    # Maximum number of operation-polling tasks hitting the backend at once.
    POLL_CONCURRENCY = 10

    # Default headers sent with every request; a tuple of pairs so the
    # class-level constant is immutable.
    HEADERS = (
        ("Content-Type", "application/json"),
        (
            "User-Agent",
            " ".join(
                (
                    f"contree-mcp/{LIBRARY_VERSION}",
                    f"python/{PYTHON_VERSION}",
                    f"{OS_NAME}/{OS_VERSION}",
                )
            ),
        ),
    )
|
|
134
|
+
|
|
135
|
+
def __init__(
    self,
    base_url: str,
    token: str,
    cache: Cache,
    timeout: float = 30.0,
    poll_interval: float = 1.0,
):
    """Create a client for the API rooted at *base_url* ("/v1" is appended).

    Args:
        base_url: Backend root URL; any trailing slash is stripped.
        token: Bearer token used for the Authorization header.
        cache: Cache used for immutable lookups and operation results.
        timeout: Per-request HTTP timeout in seconds.
        poll_interval: Delay between polls of a tracked operation.
    """
    self.base_url = base_url.rstrip("/") + "/v1"
    self.token = token
    self.timeout = httpx.Timeout(timeout)
    self._cache = cache

    self._poll_interval = poll_interval
    # Caps how many tracked operations poll the backend concurrently.
    self._poll_semaphore = asyncio.Semaphore(self.POLL_CONCURRENCY)
    # operation_id -> background polling task (see _track_operation).
    self._tracked_operations: dict[str, asyncio.Task[OperationResponse]] = {}
|
|
151
|
+
|
|
152
|
+
@property
def cache(self) -> Cache:
    """Return the configured cache; raise if the client has none.

    NOTE(review): __init__ annotates `cache` as a non-optional Cache, yet
    this guard implies None is possible — confirm the intended contract.
    """
    if self._cache is None:
        raise RuntimeError("Cache is not configured")
    return self._cache
|
|
157
|
+
|
|
158
|
+
@cached_property
def headers(self) -> Mapping[str, str]:
    """Default headers plus the Authorization header, as a read-only mapping."""
    hdrs = dict(self.HEADERS)
    hdrs["Authorization"] = f"Bearer {self.token}"
    # MappingProxyType guards the cached mapping against accidental mutation.
    return MappingProxyType(hdrs)
|
|
163
|
+
|
|
164
|
+
@cached_property
def session(self) -> httpx.AsyncClient:
    """Lazily-created shared HTTP client; closed and evicted by close()."""
    return httpx.AsyncClient(headers=self.headers, timeout=self.timeout)
|
|
167
|
+
|
|
168
|
+
async def cancel_incomplete_operations(self) -> None:
    """Best-effort cancel of every tracked operation that is not yet terminal."""

    async def try_cancel(op_id: str) -> None:
        op = await self.get_operation(op_id)
        if not op.status.is_terminal():
            await self.cancel_operation(op_id)

    # return_exceptions=True: failing to cancel one operation must not
    # prevent cancellation of the others.
    await asyncio.gather(*[try_cancel(op_id) for op_id in self._tracked_operations], return_exceptions=True)
|
|
175
|
+
|
|
176
|
+
async def close(self) -> None:
    """Cancel tracked operations, drain their tasks, and close the HTTP session."""
    if self._tracked_operations:
        log.info("Cancelling %d tracked operations", len(self._tracked_operations))

    # Stop the local polling tasks first, then ask the backend to cancel
    # whatever is still running server-side.
    for task in self._tracked_operations.values():
        task.cancel()

    await asyncio.gather(
        *self._tracked_operations.values(), self.cancel_incomplete_operations(), return_exceptions=True
    )
    self._tracked_operations.clear()

    # `session` is a cached_property: only close it if it was ever created.
    if "session" in self.__dict__:
        await asyncio.gather(self.session.aclose(), return_exceptions=True)
        del self.__dict__["session"]
|
|
191
|
+
|
|
192
|
+
async def __aenter__(self) -> Self:
    """Enter the async context; no setup is needed beyond construction."""
    return self
|
|
194
|
+
|
|
195
|
+
async def __aexit__(self, *args: Any) -> None:
    """Close the client on context exit, regardless of exceptions."""
    await self.close()
|
|
197
|
+
|
|
198
|
+
async def _request(
    self,
    method: str,
    path: str,
    *,
    model: type[ModelT],
    params: dict[str, Any] | None = None,
    json: dict[str, Any] | None = None,
    headers: dict[str, str] | None = None,
    content: bytes | IO[bytes] | None = None,
    follow_redirects: bool = True,
    payload_limit: int = 64 * 1024,
) -> "StructuredResponse[ModelT]":
    """Perform a request and parse the JSON body into *model*.

    Thin wrapper over _stream_request that buffers the body (bounded by
    *payload_limit*) and validates it with pydantic via
    StructuredResponse.from_stream.
    """
    async with self._stream_request(
        method,
        path,
        params=params,
        json=json,
        headers=headers,
        content=content,
        follow_redirects=follow_redirects,
    ) as stream_response:
        return await StructuredResponse.from_stream(
            stream_response,
            model=model,
            payload_limit=payload_limit,
        )
|
|
225
|
+
|
|
226
|
+
@asynccontextmanager
async def _stream_request(
    self,
    method: str,
    path: str,
    chunk_size: int = 64 * 1024,
    retry_time: int | float = 2,
    retry_count: int = 5,
    **kwargs: Any,
) -> AsyncIterator[StreamResponse]:
    """
    Perform an HTTP request and yield a streaming response.

    Retries on server errors (5xx) up to *retry_count* times, sleeping
    *retry_time* seconds between attempts.
    Raises ContreeError on client errors (4xx) and when retries are exhausted.
    """
    url = f"{self.base_url}/{path.lstrip('/')}"
    log.debug("%s %s (streaming)", method, path)
    for _ in range(retry_count):
        async with self.session.stream(method, url, **kwargs) as response:
            # BUG FIX: check 5xx BEFORE the generic >= 400 branch. The
            # original checked >= 400 first, which also matched 5xx and made
            # the retry branch unreachable — server errors raised instead of
            # retrying, contradicting this docstring.
            if response.status_code >= 500:
                log.debug("%s %s -> %d: server error, retrying...", method, path, response.status_code)
                await asyncio.sleep(retry_time)
                continue  # Retry on server errors
            if response.status_code >= 400:
                # Prefer the backend's structured {"error": ...} message,
                # falling back to the raw body text.
                error_body = await response.aread()
                try:
                    error_msg = json.loads(error_body).get("error", error_body.decode())
                except Exception:
                    error_msg = error_body.decode()

                log.debug("%s %s -> %d: %s", method, path, response.status_code, error_msg)
                raise ContreeError(error_msg, response.status_code)

            log.debug("%s %s -> %d (streaming)", method, path, response.status_code)

            async def chunk_iterator() -> AsyncIterator[bytes]:
                async for chunk in response.aiter_bytes(chunk_size):
                    yield chunk

            yield StreamResponse(status=response.status_code, headers=response.headers, body_iter=chunk_iterator())
            return
    # Every attempt hit a 5xx. Without an explicit raise the generator would
    # exit without yielding and asynccontextmanager would surface a confusing
    # RuntimeError instead of a meaningful error.
    raise ContreeError(f"{method} {path} failed after {retry_count} attempts (server error)")
|
|
267
|
+
|
|
268
|
+
async def _head_request(self, path: str, params: dict[str, Any] | None = None) -> int:
    """Issue a HEAD request and return only the HTTP status code.

    Note: _stream_request raises ContreeError for 4xx responses, so this
    returns a status only for non-error responses.
    """
    async with self._stream_request("HEAD", path, params=params) as response:
        return response.status
|
|
271
|
+
|
|
272
|
+
async def list_images(
    self,
    limit: int = 100,
    offset: int = 0,
    tagged: bool | None = None,
    tag_prefix: str | None = None,
    since: str | None = None,
    until: str | None = None,
) -> list[Image]:
    """List images, optionally filtered by tag state, tag prefix, and time range.

    Args:
        limit: Maximum number of images to return.
        offset: Pagination offset.
        tagged: When set, restrict to tagged ("1") or untagged ("0") images.
        tag_prefix: Tag filter; trailing separators are stripped first.
        since: Lower time bound, passed through to the backend verbatim.
        until: Upper time bound, passed through to the backend verbatim.
    """
    params: dict[str, Any] = {"limit": limit, "offset": offset}
    if tagged is not None:
        params["tagged"] = "1" if tagged else "0"
    if tag_prefix:
        # Strip trailing separators - backend validates tag format strictly
        params["tag"] = tag_prefix.rstrip(":/.")
    if since:
        params["since"] = since
    if until:
        params["until"] = until

    response = await self._request("GET", "/images", model=ImageListResponse, params=params)
    return response.body.images
|
|
294
|
+
|
|
295
|
+
async def import_image(
    self,
    registry_url: str,
    tag: str | None = None,
    username: str | None = None,
    password: str | None = None,
    timeout: int = 300,
) -> str:
    """Start importing an image from an external registry.

    The import runs asynchronously on the backend; a background polling task
    is started and the operation ID is returned.

    Raises:
        ContreeError: If neither the response body nor the Location header
            yields an operation ID.
    """
    # Anonymous registry access unless both username and password are given.
    credentials = ImageCredentials()
    if username and password:
        credentials = ImageCredentials(username=username, password=password)

    metadata = ImportImageMetadata(
        registry=ImageRegistry(url=registry_url, credentials=credentials),
        tag=tag,
        timeout=timeout,
    )

    response = await self._request(
        "POST", "/images/import", model=InstanceSpawnResponse, json=metadata.model_dump(exclude_none=True)
    )
    operation_id = response.body.uuid

    if not operation_id:
        # Fallback to Location header
        location = response.headers.get("Location", "") or response.headers.get("location", "")
        operation_id = location.split("/")[-1] if location else ""

    if not operation_id:
        raise ContreeError("No operation ID returned from import request")

    # Start background polling task
    self._track_operation(operation_id, kind="image_import", registry_url=registry_url, tag=tag)
    log.info("Importing image %s -> operation %s", registry_url, operation_id)
    return operation_id
|
|
330
|
+
|
|
331
|
+
async def tag_image(self, image_uuid: str, tag: str) -> Image:
    """Set *tag* on the image and return the updated image record."""
    response = await self._request("PATCH", f"/images/{image_uuid}/tag", model=Image, json={"tag": tag})
    return response.body
|
|
334
|
+
|
|
335
|
+
async def untag_image(self, image_uuid: str) -> Image:
    """Remove the tag from the image and return the updated image record."""
    response = await self._request("DELETE", f"/images/{image_uuid}/tag", model=Image)
    return response.body
|
|
338
|
+
|
|
339
|
+
async def get_image_by_tag(self, tag: str) -> Image:
    """Resolve *tag* to its image via the inspect endpoint (follows redirects)."""
    response = await self._request("GET", "/inspect/", model=Image, params={"tag": tag}, follow_redirects=True)

    return response.body
|
|
343
|
+
|
|
344
|
+
async def get_image(self, image_uuid: str) -> Image:
    """Fetch a single image record by UUID."""
    response = await self._request("GET", f"/inspect/{image_uuid}/", model=Image)
    return response.body
|
|
347
|
+
|
|
348
|
+
async def list_directory(self, image_uuid: str, path: str = "/") -> DirectoryList:
    """List a directory inside an image.

    Image content is immutable, so results are cached without a TTL.
    """
    # Normalize to an absolute path so equivalent inputs share a cache entry.
    path = f"/{path.lstrip('/')}"
    cache_key = f"{image_uuid}:{path}"

    entry = await self.cache.get("list_dir", cache_key)
    if entry:
        return DirectoryList.model_validate(entry.data)

    response = await self._request(
        "GET", f"/inspect/{image_uuid}/list", model=DirectoryList, params={"path": path}
    )

    await self.cache.put("list_dir", cache_key, response.body.model_dump())
    return response.body
|
|
362
|
+
|
|
363
|
+
async def list_directory_text(self, image_uuid: str, path: str = "/") -> str:
    """List files in an image directory as ls-like text format.

    Uses the backend's text format option which returns output similar to `ls -l`.
    Image content is immutable - no TTL needed.
    """
    path = f"/{path.lstrip('/')}"
    cache_key = f"{image_uuid}:{path}:text"
    entry = await self.cache.get("list_dir_text", cache_key)
    if entry:
        return str(entry.data["text"])
    # An empty "text" query parameter selects the backend's plain-text format.
    async with self._stream_request(
        "GET", f"/inspect/{image_uuid}/list", params={"path": path, "text": ""}
    ) as chunk_iter:
        with BytesIO() as stream:
            async for chunk in chunk_iter:
                stream.write(chunk)
            result = stream.getvalue().decode("utf-8")
    await self.cache.put("list_dir_text", cache_key, {"text": result})
    return result
|
|
383
|
+
|
|
384
|
+
async def read_file(self, image_uuid: str, path: str) -> bytes:
    """Read a file from an image. Image content is immutable - no TTL needed."""
    # CONSISTENCY FIX: normalize the path BEFORE building the cache key.
    # The original keyed on the raw path but requested the normalized one,
    # so "etc/passwd" and "/etc/passwd" produced distinct cache entries for
    # the same file. list_directory normalizes before keying; match it.
    path = f"/{path.lstrip('/')}"
    cache_key = f"{image_uuid}:{path}"

    entry = await self.cache.get("read_file", cache_key)
    if entry:
        # File bytes are cached base64-encoded (the cache stores JSON data).
        return base64.b64decode(entry.data["content"])

    async with self._stream_request("GET", f"/inspect/{image_uuid}/download", params={"path": path}) as chunk_iter:
        with BytesIO() as stream:
            async for chunk in chunk_iter:
                stream.write(chunk)
            result = stream.getvalue()

    await self.cache.put("read_file", cache_key, {"content": base64.b64encode(result).decode()})
    return result
|
|
402
|
+
|
|
403
|
+
@asynccontextmanager
async def stream_file(
    self, image_uuid: str, path: str, chunk_size: int = 64 * 1024
) -> AsyncIterator[AsyncIterator[bytes]]:
    """Stream a file from an image in chunks.

    Usage:
        async with client.stream_file(image_uuid, path) as chunks:
            async for chunk in chunks:
                file.write(chunk)
    """
    params: dict[str, Any] = {"path": path}
    async with self._stream_request(
        "GET",
        f"/inspect/{image_uuid}/download",
        params=params,
        chunk_size=chunk_size,
    ) as chunks:
        # StreamResponse is itself async-iterable over the body bytes.
        yield chunks  # type: ignore[misc]
|
|
422
|
+
|
|
423
|
+
async def file_exists(self, image_uuid: str, path: str) -> bool:
    """Check if a file exists in an image. Image content is immutable - no TTL needed."""
    cache_key = f"{image_uuid}:{path}"

    entry = await self.cache.get("file_exists", cache_key)
    if entry:
        return bool(entry.data["exists"])

    try:
        status = await self._head_request(f"/inspect/{image_uuid}/download", params={"path": path})
        exists = status == 200
    except Exception:
        # _head_request raises ContreeError on 4xx, so "not found" surfaces
        # here. NOTE(review): transient network errors also land here and get
        # cached as non-existence — confirm this is acceptable.
        exists = False

    await self.cache.put("file_exists", cache_key, {"exists": exists})
    return exists
|
|
439
|
+
|
|
440
|
+
async def upload_file(self, content: bytes | IO[bytes]) -> FileResponse:
    """Upload a file to the server.

    Computes SHA256 hash and checks cache/server before uploading to avoid duplicates.

    Args:
        content: File content as bytes or a file-like object (IO[bytes]).
            Using IO[bytes] allows streaming without loading entire file into RAM.
    """
    # If content is file-like, read it (httpx content param expects bytes)
    if hasattr(content, "read"):
        content = content.read()

    # Compute SHA256 hash
    sha256 = hashlib.sha256(content).hexdigest()

    # Check if file already exists (cache + server)
    existing = await self.get_file_by_hash(sha256)
    if existing:
        log.debug("File already exists: uuid=%s sha256=%s...", existing.uuid, sha256[:16])
        return existing

    # Upload new file
    size = len(content)
    log.debug("Uploading file (%s bytes, sha256=%s...)", size, sha256[:16])

    # Raw bytes upload: override the client-wide JSON Content-Type.
    response = await self._request(
        "POST",
        "/files",
        model=FileResponse,
        content=content,
        headers={"Content-Type": "application/octet-stream"},
    )

    # Cache the response by hash
    await self.cache.put("file_by_hash", sha256, response.body.model_dump())

    log.debug("Uploaded file: uuid=%s sha256=%s...", response.body.uuid, response.body.sha256[:16])
    return response.body
|
|
479
|
+
|
|
480
|
+
async def check_file_exists(self, file_uuid: str) -> bool:
    """Check if an uploaded file exists by UUID. File existence is immutable - no TTL needed."""
    entry = await self.cache.get("file_exists_by_uuid", file_uuid)
    if entry:
        return bool(entry.data["exists"])

    try:
        status = await self._head_request("/files", params={"uuid": file_uuid})
        exists = status == 200
    except Exception:
        # _head_request raises ContreeError on 4xx; a missing file lands here.
        # NOTE(review): transient errors are cached as non-existence too.
        exists = False

    await self.cache.put("file_exists_by_uuid", file_uuid, {"exists": exists})
    return exists
|
|
494
|
+
|
|
495
|
+
async def get_file_by_hash(self, sha256: str) -> FileResponse | None:
    """Get file UUID by SHA256 hash. Hash-based lookup is immutable - no TTL needed.

    Returns None when the server reports 404; negative results are cached
    as a {"not_found": True} marker.
    """
    entry = await self.cache.get("file_by_hash", sha256)
    if entry:
        if entry.data.get("not_found"):
            return None
        return FileResponse.model_validate(entry.data)

    try:
        response = await self._request("GET", "/files", model=FileResponse, params={"sha256": sha256})
        await self.cache.put("file_by_hash", sha256, response.body.model_dump())
        return response.body
    except ContreeError as e:
        if e.status_code == 404:
            await self.cache.put("file_by_hash", sha256, {"not_found": True})
            return None
        raise
|
|
512
|
+
|
|
513
|
+
async def spawn_instance(
    self,
    command: str,
    image: str,
    shell: bool = True,
    args: list[str] | None = None,
    env: dict[str, str] | None = None,
    cwd: str = "/root",
    timeout: int = 30,
    hostname: str = "linuxkit",
    disposable: bool = False,
    stdin: str | None = None,
    files: dict[str, dict[str, Any]] | None = None,
    truncate_output_at: int = 1048576,
) -> str:
    """Spawn an instance running *command* in *image* and return its operation ID.

    The operation is tracked by a background polling task; use
    wait_for_operation to await its result.

    Raises:
        ContreeError: If the backend returned no operation ID.
    """
    metadata = InstanceMetadata(
        command=command,
        image=image,
        shell=shell,
        args=args or [],
        env=env or {},
        cwd=cwd,
        timeout=timeout,
        hostname=hostname,
        disposable=disposable,
        stdin=Stream.from_bytes(stdin.encode()) if stdin else Stream(value=""),
        truncate_output_at=ByteSize(truncate_output_at),
        files={k: InstanceFileSpec(**v) for k, v in (files or {}).items()},
    )

    response = await self._request("POST", "/instances", model=InstanceSpawnResponse, json=metadata.model_dump())
    operation_id = response.body.uuid
    if not operation_id:
        raise ContreeError("No operation ID returned from spawn_instance")
    # input_image/command are recorded for lineage caching on completion.
    self._track_operation(operation_id, kind="instance", input_image=image, command=command)
    log.debug(
        "Spawning instance: image=%s command=%r -> operation %s",
        image,
        command[:50] + "..." if len(command) > 50 else command,
        operation_id,
    )
    return operation_id
|
|
555
|
+
|
|
556
|
+
async def list_operations(
    self,
    limit: int = 100,
    offset: int = 0,
    status: OperationStatus | None = None,
    kind: OperationKind | None = None,
    since: str | None = None,
    until: str | None = None,
) -> list[OperationSummary]:
    """Return operation summaries matching the given pagination window and filters.

    Filters left as None are omitted from the query entirely.
    """
    candidates: dict[str, Any] = {
        "limit": limit,
        "offset": offset,
        "status": status,
        "kind": kind,
        "since": since,
        "until": until,
    }
    # Drop unset filters so the backend only sees parameters the caller chose.
    query = {name: value for name, value in candidates.items() if value is not None}
    response = await self._request("GET", "/operations", model=OperationListResponse, params=query)
    return response.body.operations
|
|
577
|
+
|
|
578
|
+
async def _fetch_operation(self, operation_id: str) -> OperationResponse:
    """Fetch an operation from the backend and refresh its cache entry."""
    response = await self._request("GET", f"/operations/{operation_id}", model=OperationResponse)
    result = response.body
    await self.cache.put("operation", operation_id, result.model_dump())
    return result
|
|
583
|
+
|
|
584
|
+
async def get_operation(self, operation_id: str) -> OperationResponse:
    """Return an operation, served from cache when available.

    NOTE(review): a cached non-terminal status may be stale until the next
    _fetch_operation — presumably the Cache layer bounds this; confirm.
    """
    entry = await self.cache.get("operation", operation_id)
    if entry:
        return OperationResponse.model_validate(entry.data)
    return await self._fetch_operation(operation_id)
|
|
589
|
+
|
|
590
|
+
async def cancel_operation(self, operation_id: str) -> OperationStatus:
    """Cancel an operation; already-terminal operations are returned as-is."""
    current_status = await self.get_operation(operation_id)
    if current_status.status.is_terminal():
        return current_status.status
    async with self._stream_request("DELETE", f"/operations/{operation_id}") as response:
        # NOTE(review): `> 400` treats HTTP 400 itself as success, and
        # _stream_request already raises on >= 400 — confirm whether this
        # guard should be `>= 400` (or is intentionally redundant).
        if response.status > 400:
            raise ContreeError(f"Failed to cancel operation {operation_id}: HTTP {response.status}")
    log.info("Cancelled operation %s", operation_id)
    return OperationStatus.CANCELLED
|
|
599
|
+
|
|
600
|
+
async def wait_for_operation(self, operation_id: str, max_wait: float | None = None) -> OperationResponse:
    """Wait until the operation completes, cancelling it on timeout or caller cancellation.

    Raises:
        ContreeError: If the operation did not finish within *max_wait* seconds.
    """
    task = self._tracked_operations.get(operation_id)
    if task is None:
        op = await self.get_operation(operation_id)
        if op.status.is_terminal():
            return op
        # Not tracked locally (e.g. started elsewhere): begin polling now.
        kind: OperationTrackingKind = "instance" if op.kind == OperationKind.INSTANCE else "image_import"
        task = self._track_operation(operation_id, kind=kind)
    try:
        # shield() keeps the shared polling task alive for other waiters even
        # if this particular wait times out or is cancelled.
        return await asyncio.wait_for(asyncio.shield(task), timeout=max_wait)
    except (asyncio.TimeoutError, TimeoutError) as e:
        await asyncio.shield(self.cancel_operation(operation_id))
        raise ContreeError(f"Operation {operation_id} timed out after {max_wait}s") from e
    except asyncio.CancelledError:
        # Propagate cancellation, but ask the backend to stop the work first.
        await asyncio.shield(self.cancel_operation(operation_id))
        raise
|
|
616
|
+
|
|
617
|
+
def _track_operation(
    self, operation_id: str, kind: OperationTrackingKind, **metadata: Any
) -> asyncio.Task[OperationResponse]:
    """Start (or return the existing) background task polling this operation.

    *metadata* is forwarded to _cache_lineage when the operation completes.
    """
    # Idempotent: a second call for the same operation reuses its task.
    if operation_id in self._tracked_operations:
        return self._tracked_operations[operation_id]

    log.debug("Tracking operation %s (kind=%s)", operation_id, kind)
    task = asyncio.create_task(
        self._poll_until_complete(operation_id, kind, metadata),
        name=f"poll-{operation_id[:8]}",
    )
    self._tracked_operations[operation_id] = task
    return task
|
|
630
|
+
|
|
631
|
+
def is_tracked(self, operation_id: str) -> bool:
    """Return True if a background polling task exists for this operation."""
    return operation_id in self._tracked_operations
|
|
633
|
+
|
|
634
|
+
async def _poll_until_complete(
    self,
    operation_id: str,
    kind: OperationTrackingKind,
    metadata: dict[str, Any],
) -> OperationResponse:
    """Poll the backend until the operation reaches a terminal status.

    Records image lineage via _cache_lineage on completion, and always
    removes the operation from the tracked set — also when cancelled or
    on error.
    """
    try:
        # The semaphore caps concurrent pollers at POLL_CONCURRENCY.
        async with self._poll_semaphore:
            while True:
                result = await self._fetch_operation(operation_id)
                if result.status.is_terminal():
                    log.debug("Operation %s completed: %s", operation_id, result.status.value)
                    await self._cache_lineage(operation_id, kind, result, metadata)
                    return result
                log.debug("Operation %s still %s", operation_id, result.status.value)
                await asyncio.sleep(self._poll_interval)
    finally:
        # noinspection PyAsyncCall
        self._tracked_operations.pop(operation_id, None)
|
|
653
|
+
|
|
654
|
+
async def _cache_lineage(
    self,
    operation_id: str,
    kind: OperationTrackingKind,
    op_result: OperationResponse,
    metadata: dict[str, Any],
) -> None:
    """Record image ancestry in the cache once an operation finishes.

    For a successful instance run that produced a new image, links the
    result image to its input image; for a successful import, records the
    source registry and tag. Failed operations record nothing.
    """
    is_success = op_result.status == OperationStatus.SUCCESS
    # The result payload may arrive as a typed OperationResult or a raw dict.
    result_data = op_result.result
    if isinstance(result_data, OperationResult):
        result_image = result_data.image
        result_tag = result_data.tag
    elif isinstance(result_data, dict):
        result_image = result_data.get("image")
        result_tag = result_data.get("tag")
    else:
        result_image = None
        result_tag = None

    if kind == "instance":
        input_image = metadata.get("input_image")
        # Only record lineage when the run actually produced a NEW image.
        if is_success and input_image and result_image and input_image != result_image:
            parent_entry = await self.cache.get("image", input_image)
            parent_id = parent_entry.id if parent_entry else None
            await self.cache.put(
                kind="image",
                key=result_image,
                data={
                    "parent_image": input_image,
                    "operation_id": operation_id,
                    "command": metadata.get("command"),
                },
                parent_id=parent_id,
            )
    elif kind == "image_import":
        if is_success and result_image:
            # Imported images are lineage roots (no parent).
            await self.cache.put(
                kind="image",
                key=result_image,
                data={
                    "operation_id": operation_id,
                    "registry_url": metadata.get("registry_url"),
                    "tag": result_tag,
                    "is_import": True,
                },
                parent_id=None,
            )
|
|
701
|
+
|
|
702
|
+
async def resolve_image(self, image: str) -> str:
    """Resolve an image reference — a "tag:name" string or a UUID — to an image UUID.

    Raises:
        ContreeError: If the reference is neither a valid UUID nor tag form.
    """
    ref = unquote(image)
    if ref.startswith("tag:"):
        resolved = await self.get_image_by_tag(ref[4:])
        return resolved.uuid
    # Not a tag reference: it must parse as a UUID to be valid.
    try:
        UUID(ref)
    except ValueError as err:
        raise ContreeError(f"Invalid image reference: {ref!r}. Use UUID or 'tag:name' format.") from err
    return ref
|