fleet-python 0.2.69b2__py3-none-any.whl → 0.2.70__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of fleet-python might be problematic.
- fleet/__init__.py +3 -2
- fleet/_async/__init__.py +26 -2
- fleet/_async/base.py +21 -10
- fleet/_async/client.py +131 -201
- fleet/_async/env/client.py +38 -7
- fleet/_async/instance/client.py +4 -19
- fleet/_async/resources/sqlite.py +1 -150
- fleet/_async/tasks.py +13 -7
- fleet/_async/verifiers/bundler.py +22 -21
- fleet/_async/verifiers/verifier.py +20 -19
- fleet/base.py +21 -10
- fleet/client.py +128 -199
- fleet/config.py +1 -1
- fleet/env/__init__.py +8 -0
- fleet/env/client.py +38 -7
- fleet/instance/client.py +5 -20
- fleet/models.py +33 -0
- fleet/resources/sqlite.py +1 -143
- fleet/tasks.py +15 -7
- fleet/verifiers/bundler.py +22 -21
- fleet/verifiers/decorator.py +1 -1
- fleet/verifiers/verifier.py +20 -19
- {fleet_python-0.2.69b2.dist-info → fleet_python-0.2.70.dist-info}/METADATA +1 -1
- {fleet_python-0.2.69b2.dist-info → fleet_python-0.2.70.dist-info}/RECORD +27 -30
- tests/test_instance_dispatch.py +0 -607
- tests/test_sqlite_resource_dual_mode.py +0 -263
- tests/test_sqlite_shared_memory_behavior.py +0 -117
- {fleet_python-0.2.69b2.dist-info → fleet_python-0.2.70.dist-info}/WHEEL +0 -0
- {fleet_python-0.2.69b2.dist-info → fleet_python-0.2.70.dist-info}/licenses/LICENSE +0 -0
- {fleet_python-0.2.69b2.dist-info → fleet_python-0.2.70.dist-info}/top_level.txt +0 -0
fleet/base.py
CHANGED
@@ -2,6 +2,7 @@ import httpx
 from typing import Dict, Any, Optional
 import json
 import logging
+import uuid

 from .models import InstanceResponse
 from .config import GLOBAL_BASE_URL
@@ -20,6 +21,12 @@ from .exceptions import (
     FleetPermissionError,
 )

+# Import version
+try:
+    from . import __version__
+except ImportError:
+    __version__ = "0.2.70"
+
 logger = logging.getLogger(__name__)


@@ -38,17 +45,17 @@ class BaseWrapper:
             base_url = GLOBAL_BASE_URL
         self.base_url = base_url

-    def get_headers(self) -> Dict[str, str]:
+    def get_headers(self, request_id: Optional[str] = None) -> Dict[str, str]:
         headers: Dict[str, str] = {
             "X-Fleet-SDK-Language": "Python",
-            "X-Fleet-SDK-Version":
+            "X-Fleet-SDK-Version": __version__,
         }
         headers["Authorization"] = f"Bearer {self.api_key}"
-
-
-
-
-
+
+        # Add request ID for idempotency (persists across retries)
+        if request_id:
+            headers["X-Request-ID"] = request_id
+
         return headers


@@ -67,11 +74,14 @@ class SyncWrapper(BaseWrapper):
         **kwargs,
     ) -> httpx.Response:
         base_url = base_url or self.base_url
+        # Generate unique request ID that persists across retries
+        request_id = str(uuid.uuid4())
+
         try:
             response = self.httpx_client.request(
                 method,
                 f"{base_url}{url}",
-                headers=self.get_headers(),
+                headers=self.get_headers(request_id=request_id),
                 params=params,
                 json=json,
                 **kwargs,
@@ -93,8 +103,9 @@ class SyncWrapper(BaseWrapper):

         # Debug log 500 errors
         if status_code == 500:
-            logger.error(f"Got 500 error from {response.url}")
-            logger.error(f"Response text: {response.text}")
+            # logger.error(f"Got 500 error from {response.url}")
+            # logger.error(f"Response text: {response.text}")
+            pass

         # Try to parse error response as JSON
         try:
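The base-client change above threads a per-request UUID into get_headers() so every retry of the same logical call carries the same X-Request-ID. A minimal sketch of that idempotency pattern, assuming a hypothetical build_headers helper (not part of the SDK) and a placeholder API key:

import uuid
from typing import Dict, Optional

def build_headers(api_key: str, sdk_version: str, request_id: Optional[str] = None) -> Dict[str, str]:
    # Mirrors the reworked get_headers(): SDK version header plus an optional idempotency key.
    headers = {
        "X-Fleet-SDK-Language": "Python",
        "X-Fleet-SDK-Version": sdk_version,
        "Authorization": f"Bearer {api_key}",
    }
    if request_id:
        headers["X-Request-ID"] = request_id
    return headers

# Generate the ID once per logical request and reuse it on every retry attempt,
# so the server can deduplicate repeated deliveries of the same call.
request_id = str(uuid.uuid4())
for attempt in range(3):
    headers = build_headers("fleet_sk_example", "0.2.70", request_id=request_id)
    # ... issue the HTTP request with these headers; on a transient failure,
    # retry with the same request_id instead of generating a new one.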
fleet/client.py
CHANGED
@@ -21,7 +21,7 @@ import httpx
 import json
 import logging
 import os
-from typing import List, Optional, Dict, Any, TYPE_CHECKING
+from typing import List, Optional, Dict, Any, TYPE_CHECKING

 from .base import EnvironmentBase, SyncWrapper
 from .models import (
@@ -35,6 +35,8 @@ from .models import (
     TaskRequest,
     TaskResponse,
     TaskUpdateRequest,
+    Run,
+    HeartbeatResponse,
 )
 from .tasks import Task

@@ -47,11 +49,6 @@ from .instance import (
     ResetResponse,
     ExecuteFunctionResponse,
 )
-from .instance.models import (
-    Resource as ResourceModel,
-    ResourceType,
-    ResourceMode,
-)
 from .config import (
     DEFAULT_MAX_RETRIES,
     DEFAULT_TIMEOUT,
@@ -131,6 +128,23 @@ class SyncEnv(EnvironmentBase):
     def close(self) -> InstanceResponse:
         return _delete_instance(self._load_client, self.instance_id)

+    def heartbeat(self) -> HeartbeatResponse:
+        """Send heartbeat to keep instance alive (if heartbeat monitoring is enabled).
+
+        Returns:
+            HeartbeatResponse containing heartbeat status and deadline information
+        """
+        body = {}
+        if self.heartbeat_region:
+            body["region"] = self.heartbeat_region
+
+        response = self._load_client.request(
+            "POST",
+            f"/v1/env/instances/{self.instance_id}/heartbeat",
+            json=body
+        )
+        return HeartbeatResponse(**response.json())
+
     def verify(self, validator: ValidatorType) -> ExecuteFunctionResponse:
         return self.instance.verify(validator)

@@ -218,6 +232,7 @@ class Fleet:
         image_type: Optional[str] = None,
         ttl_seconds: Optional[int] = None,
         run_id: Optional[str] = None,
+        heartbeat_interval: Optional[int] = None,
     ) -> SyncEnv:
         if ":" in env_key:
             env_key_part, env_version = env_key.split(":", 1)
@@ -254,6 +269,7 @@ class Fleet:
             created_from="sdk",
             ttl_seconds=ttl_seconds,
             run_id=run_id,
+            heartbeat_interval=heartbeat_interval,
         )

         # Only use region-specific base URL if no custom base URL is set
@@ -276,7 +292,7 @@ class Fleet:
         return self.make(env_key=f"{task.env_id}:{task.version}")

     def instances(
-        self, status: Optional[str] = None, region: Optional[str] = None, run_id: Optional[str] = None
+        self, status: Optional[str] = None, region: Optional[str] = None, run_id: Optional[str] = None, profile_id: Optional[str] = None
     ) -> List[SyncEnv]:
         params = {}
         if status:
@@ -285,6 +301,8 @@ class Fleet:
             params["region"] = region
         if run_id:
             params["run_id"] = run_id
+        if profile_id:
+            params["profile_id"] = profile_id

         response = self.client.request("GET", "/v1/env/instances", params=params)
         return [
@@ -292,163 +310,11 @@ class Fleet:
             for instance_data in response.json()
         ]

-    def instance(self, instance_id:
-        """
-
-
-
-        2. str starting with http:// or https://: Localhost/URL mode
-        3. str (other): Remote cloud instance mode
-
-        Args:
-            instance_id: Instance identifier (str), URL (str starting with http://),
-                or local db mapping (dict)
-
-        Returns:
-            SyncEnv: Environment instance
-        """
-        # Local filesystem mode - dict of resource names to file paths
-        if isinstance(instance_id, dict):
-            return self._create_local_instance(instance_id)
-
-        # Localhost/direct URL mode - string starting with http:// or https://
-        elif isinstance(instance_id, str) and instance_id.startswith(("http://", "https://")):
-            return self._create_url_instance(instance_id)
-
-        # Remote mode - existing behavior
-        else:
-            response = self.client.request("GET", f"/v1/env/instances/{instance_id}")
-            instance = SyncEnv(client=self.client, **response.json())
-            instance.instance.load()
-            return instance
-
-    def _create_url_instance(self, base_url: str) -> SyncEnv:
-        """Create instance connected to a direct URL (localhost or custom).
-
-        Args:
-            base_url: URL of the instance manager API
-
-        Returns:
-            SyncEnv: Environment instance configured for URL mode
-        """
-        instance_client = InstanceClient(url=base_url, httpx_client=self._httpx_client)
-
-        # Create a minimal environment for URL mode
-        env = SyncEnv(
-            client=self.client,
-            instance_id=base_url,
-            env_key="localhost",
-            version="",
-            status="running",
-            subdomain="localhost",
-            created_at="",
-            updated_at="",
-            terminated_at=None,
-            team_id="",
-            region="localhost",
-            env_variables=None,
-            data_key=None,
-            data_version=None,
-            urls=None,
-            health=None,
-        )
-        env._instance = instance_client
-        return env
-
-    @staticmethod
-    def _normalize_db_path(path: str) -> tuple[str, bool]:
-        """Normalize database path and detect if it's in-memory.
-
-        Args:
-            path: Database path - can be:
-                - File path: "./data.db"
-                - Plain memory: ":memory:"
-                - Named memory: ":memory:namespace"
-                - URI: "file:name?mode=memory&cache=shared"
-
-        Returns:
-            Tuple of (normalized_path, is_memory)
-        """
-        import uuid
-        import sqlite3
-
-        if path == ":memory:":
-            # Plain :memory: - create unique namespace
-            name = f"mem_{uuid.uuid4().hex[:8]}"
-            return f"file:{name}?mode=memory&cache=shared", True
-        elif path.startswith(":memory:"):
-            # Named memory: :memory:current -> file:current?mode=memory&cache=shared
-            namespace = path[8:]  # Remove ":memory:" prefix
-            return f"file:{namespace}?mode=memory&cache=shared", True
-        elif "mode=memory" in path:
-            # Already a proper memory URI
-            return path, True
-        else:
-            # Regular file path
-            return path, False
-
-    def _create_local_instance(self, dbs: Dict[str, str]) -> SyncEnv:
-        """Create instance with local file-based or in-memory SQLite resources.
-
-        Args:
-            dbs: Map of resource names to paths (e.g., {"current": "./data.db"} or
-                {"current": ":memory:current"})
-
-        Returns:
-            SyncEnv: Environment instance configured for local mode
-        """
-        import sqlite3
-
-        instance_client = InstanceClient(url="local://", httpx_client=None)
-        instance_client._resources = []  # Mark as loaded
-        instance_client._memory_anchors = {}  # Store anchor connections for in-memory DBs
-
-        # Store creation parameters for local SQLiteResources
-        # This allows db() to create new instances each time (matching HTTP mode behavior)
-        for name, path in dbs.items():
-            # Normalize path and detect if it's in-memory
-            normalized_path, is_memory = self._normalize_db_path(path)
-
-            # Create anchor connection for in-memory databases
-            # This keeps the database alive as long as the env exists
-            if is_memory:
-                anchor_conn = sqlite3.connect(normalized_path, uri=True)
-                instance_client._memory_anchors[name] = anchor_conn
-
-            resource_model = ResourceModel(
-                name=name,
-                type=ResourceType.db,
-                mode=ResourceMode.rw,
-                label=f"Local: {path}",
-            )
-            instance_client._resources_state[ResourceType.db.value][name] = {
-                'type': 'local',
-                'resource_model': resource_model,
-                'db_path': normalized_path,
-                'is_memory': is_memory
-            }
-
-        # Create a minimal environment for local mode
-        env = SyncEnv(
-            client=self.client,
-            instance_id="local",
-            env_key="local",
-            version="",
-            status="running",
-            subdomain="local",
-            created_at="",
-            updated_at="",
-            terminated_at=None,
-            team_id="",
-            region="local",
-            env_variables=None,
-            data_key=None,
-            data_version=None,
-            urls=None,
-            health=None,
-        )
-        env._instance = instance_client
-        return env
+    def instance(self, instance_id: str) -> SyncEnv:
+        response = self.client.request("GET", f"/v1/env/instances/{instance_id}")
+        instance = SyncEnv(client=self.client, **response.json())
+        instance.instance.load()
+        return instance

     def check_bundle_exists(self, bundle_hash: str) -> VerifiersCheckResponse:
         return _check_bundle_exists(self.client, bundle_hash)
@@ -472,16 +338,53 @@ class Fleet:
         """
         return _delete_instance(self.client, instance_id)

-    def
-        """
+    def heartbeat(self, instance_id: str, region: Optional[str] = None) -> HeartbeatResponse:
+        """Send heartbeat to keep instance alive (if heartbeat monitoring is enabled).
+
+        Args:
+            instance_id: The instance ID to send heartbeat for
+            region: Optional region override for cross-region heartbeats
+
+        Returns:
+            HeartbeatResponse containing heartbeat status and deadline information
+        """
+        return _send_heartbeat(self.client, instance_id, region)
+
+    def close_all(self, run_id: Optional[str] = None, profile_id: Optional[str] = None) -> List[InstanceResponse]:
+        """Close (delete) instances using the batch delete endpoint.

         Args:
-            run_id:
+            run_id: Optional run ID to filter instances by
+            profile_id: Optional profile ID to filter instances by (use "self" for your own profile)

         Returns:
             List[InstanceResponse] containing the deleted instances
+
+        Note:
+            At least one of run_id or profile_id must be provided.
         """
-        return
+        return _delete_instances_batch(self.client, run_id=run_id, profile_id=profile_id)
+
+    def list_runs(
+        self, profile_id: Optional[str] = None, status: Optional[str] = "active"
+    ) -> List[Run]:
+        """List all runs (groups of instances by run_id) with aggregated statistics.
+
+        Args:
+            profile_id: Optional profile ID to filter runs by (use "self" for your own profile)
+            status: Filter by run status - "active" (default), "inactive", or "all"
+
+        Returns:
+            List[Run] containing run information with instance counts and timestamps
+        """
+        params = {}
+        if profile_id:
+            params["profile_id"] = profile_id
+        if status:
+            params["active"] = status
+
+        response = self.client.request("GET", "/v1/env/runs", params=params)
+        return [Run(**run_data) for run_data in response.json()]

     def load_tasks_from_file(self, filename: str) -> List[Task]:
         with open(filename, "r", encoding="utf-8") as f:
@@ -561,8 +464,8 @@ class Fleet:
                    error_msg = f"Failed to create verifier {task_json.get('key', task_json.get('id'))}: {e}"
                    if raise_on_verifier_error:
                        raise ValueError(error_msg) from e
-                    else:
-
+                    # else:
+                    #     logger.warning(error_msg)

             task = Task(
                 key=task_json.get("key", task_json.get("id")),
@@ -652,23 +555,23 @@ class Fleet:
                            verifier_sha=tr.verifier.sha256,
                        )
                    except Exception as e:
-                        logger.warning(
-
-                        )
+                        # logger.warning(
+                        #     f"Failed to create verifier {tr.verifier.key}: {e}"
+                        # )
                        return None
                else:
                    # Fallback: try fetching by ID
                    try:
-                        logger.warning(
-
-
-                        )
+                        # logger.warning(
+                        #     f"Embedded verifier code missing for {tr.verifier.key} (NoSuchKey). "
+                        #     f"Attempting to refetch by id {tr.verifier.verifier_id}"
+                        # )
                        return self._load_verifier(tr.verifier.verifier_id)
                    except Exception as e:
-                        logger.warning(
-
-
-                        )
+                        # logger.warning(
+                        #     f"Refetch by verifier id failed for {tr.verifier.key}: {e}. "
+                        #     "Leaving verifier unset."
+                        # )
                        return None

         # Add the task for parallel execution
@@ -708,7 +611,7 @@ class Fleet:
                    result = future.result()
                    verifier_results.append(result)
                except Exception as e:
-                    logger.warning(f"Verifier loading failed: {e}")
+                    # logger.warning(f"Verifier loading failed: {e}")
                    verifier_results.append(None)

         # Build tasks with results
@@ -795,10 +698,10 @@ class Fleet:
            with open(filename, "w", encoding="utf-8") as f:
                json.dump(tasks_data, f, indent=2, default=str)

-            logger.info(f"Exported {len(tasks)} tasks to {filename}")
+            # logger.info(f"Exported {len(tasks)} tasks to {filename}")
            return filename
        else:
-            logger.info("No tasks found to export")
+            # logger.info("No tasks found to export")
            return None

     def import_single_task(self, task: Task, project_key: Optional[str] = None):
@@ -827,7 +730,7 @@ class Fleet:
            )
            return response
        except Exception as e:
-            logger.error(f"Failed to import task {task.key}: {e}")
+            # logger.error(f"Failed to import task {task.key}: {e}")
            return None

     def import_tasks(self, filename: str, project_key: Optional[str] = None):
@@ -993,8 +896,34 @@ def _delete_instance(client: SyncWrapper, instance_id: str) -> InstanceResponse:
     return InstanceResponse(**response.json())


-def
-
+def _send_heartbeat(client: SyncWrapper, instance_id: str, region: Optional[str] = None) -> HeartbeatResponse:
+    """Send heartbeat to keep instance alive."""
+    body = {}
+    if region:
+        body["region"] = region
+
+    response = client.request(
+        "POST",
+        f"/v1/env/instances/{instance_id}/heartbeat",
+        json=body
+    )
+    return HeartbeatResponse(**response.json())
+
+
+def _delete_instances_batch(
+    client: SyncWrapper, run_id: Optional[str] = None, profile_id: Optional[str] = None
+) -> List[InstanceResponse]:
+    """Delete instances using the batch endpoint with flexible filtering."""
+    params = {}
+    if run_id:
+        params["run_id"] = run_id
+    if profile_id:
+        params["profile_id"] = profile_id
+
+    if not params:
+        raise ValueError("At least one of run_id or profile_id must be provided")
+
+    response = client.request("DELETE", "/v1/env/instances/batch", params=params)
     return [InstanceResponse(**instance_data) for instance_data in response.json()]


@@ -1040,17 +969,17 @@ def _execute_verifier_remote(
        request_data["bundle"] = bundle_b64

    # Debug logging
-    logger.debug(
-
-    )
-    logger.debug(f"Request has bundle: {needs_upload}")
-    logger.debug(f"Using client with base_url: {client.base_url}")
-    logger.debug(f"Request data keys: {list(request_data.keys())}")
-    logger.debug(
-
-
-
-    )
+    # logger.debug(
+    #     f"Sending verifier execute request: key={key}, sha256={bundle_sha[:8]}..., function_name={function_name}"
+    # )
+    # logger.debug(f"Request has bundle: {needs_upload}")
+    # logger.debug(f"Using client with base_url: {client.base_url}")
+    # logger.debug(f"Request data keys: {list(request_data.keys())}")
+    # logger.debug(
+    #     f"Bundle size: {len(request_data.get('bundle', ''))} chars"
+    #     if "bundle" in request_data
+    #     else "No bundle"
+    # )

    # Note: This should be called on the instance URL, not the orchestrator
    # The instance has manager URLs for verifier execution
@@ -1058,6 +987,6 @@ def _execute_verifier_remote(

    # Debug the response
    response_json = response.json()
-    logger.debug(f"Verifier execute response: {response_json}")
+    # logger.debug(f"Verifier execute response: {response_json}")

    return VerifiersExecuteResponse(**response_json)
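Taken together, the client changes add instance heartbeating, run listing, and batch close to the Fleet class, plus a heartbeat_interval option on make(). A hedged usage sketch, assuming a valid API key is configured; the env key, run ID, and 60-second interval are placeholders (the interval's unit is not confirmed by this diff):

from fleet.client import Fleet

client = Fleet()

# Placeholder env key and run ID; heartbeat_interval opts the instance into
# heartbeat monitoring.
env = client.make(
    "example-env:v1",
    run_id="run-demo-001",
    heartbeat_interval=60,
)

env.heartbeat()                        # keep this instance alive from the env handle
client.heartbeat(env.instance_id)      # or from the top-level client

for run in client.list_runs(status="active"):
    print(run)

client.close_all(run_id="run-demo-001")   # batch delete every instance in the run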
fleet/config.py
CHANGED
fleet/env/__init__.py
CHANGED
@@ -9,6 +9,8 @@ from .client import (
     list_instances,
     close,
     close_all,
+    list_runs,
+    heartbeat,
     account,
 )

@@ -21,6 +23,8 @@ from .._async.env.client import (
     list_instances_async,
     close_async,
     close_all_async,
+    list_runs_async,
+    heartbeat_async,
     account_async,
 )

@@ -33,6 +37,8 @@ __all__ = [
     "get",
     "close",
     "close_all",
+    "list_runs",
+    "heartbeat",
     "make_async",
     "list_envs_async",
     "list_regions_async",
@@ -40,6 +46,8 @@ __all__ = [
     "get_async",
     "close_async",
     "close_all_async",
+    "list_runs_async",
+    "heartbeat_async",
     "account",
     "account_async",
 ]
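Based on the export list above, the new helpers should be importable directly from fleet.env alongside the existing ones, for example:

from fleet.env import make, list_instances, close_all, list_runs, heartbeat
from fleet.env import list_runs_async, heartbeat_async  # async counterparts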
fleet/env/client.py
CHANGED
@@ -1,5 +1,5 @@
 from ..client import Fleet, SyncEnv, Task
-from ..models import Environment as EnvironmentModel, AccountResponse, InstanceResponse
+from ..models import Environment as EnvironmentModel, AccountResponse, InstanceResponse, Run, HeartbeatResponse
 from typing import List, Optional, Dict, Any


@@ -11,6 +11,7 @@ def make(
     image_type: Optional[str] = None,
     ttl_seconds: Optional[int] = None,
     run_id: Optional[str] = None,
+    heartbeat_interval: Optional[int] = None,
 ) -> SyncEnv:
     return Fleet().make(
         env_key,
@@ -20,6 +21,7 @@ def make(
         image_type=image_type,
         ttl_seconds=ttl_seconds,
         run_id=run_id,
+        heartbeat_interval=heartbeat_interval,
     )


@@ -36,9 +38,9 @@ def list_regions() -> List[str]:


 def list_instances(
-    status: Optional[str] = None, region: Optional[str] = None, run_id: Optional[str] = None
+    status: Optional[str] = None, region: Optional[str] = None, run_id: Optional[str] = None, profile_id: Optional[str] = None
 ) -> List[SyncEnv]:
-    return Fleet().instances(status=status, region=region, run_id=run_id)
+    return Fleet().instances(status=status, region=region, run_id=run_id, profile_id=profile_id)


 def get(instance_id: str) -> SyncEnv:
@@ -57,16 +59,45 @@ def close(instance_id: str) -> InstanceResponse:
     return Fleet().close(instance_id)


-def close_all(run_id: str) -> List[InstanceResponse]:
-    """Close (delete)
+def close_all(run_id: Optional[str] = None, profile_id: Optional[str] = None) -> List[InstanceResponse]:
+    """Close (delete) instances using the batch delete endpoint.

     Args:
-        run_id:
+        run_id: Optional run ID to filter instances by
+        profile_id: Optional profile ID to filter instances by (use "self" for your own profile)

     Returns:
         List[InstanceResponse] containing the deleted instances
+
+    Note:
+        At least one of run_id or profile_id must be provided.
+    """
+    return Fleet().close_all(run_id=run_id, profile_id=profile_id)
+
+
+def list_runs(profile_id: Optional[str] = None, status: Optional[str] = "active") -> List[Run]:
+    """List all runs (groups of instances by run_id) with aggregated statistics.
+
+    Args:
+        profile_id: Optional profile ID to filter runs by (use "self" for your own profile)
+        status: Filter by run status - "active" (default), "inactive", or "all"
+
+    Returns:
+        List[Run] containing run information with instance counts and timestamps
+    """
+    return Fleet().list_runs(profile_id=profile_id, status=status)
+
+
+def heartbeat(instance_id: str) -> HeartbeatResponse:
+    """Send heartbeat to keep instance alive (if heartbeat monitoring is enabled).
+
+    Args:
+        instance_id: The instance ID to send heartbeat for
+
+    Returns:
+        HeartbeatResponse containing heartbeat status and deadline information
     """
-    return Fleet().
+    return Fleet().heartbeat(instance_id)


 def account() -> AccountResponse:
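The module-level wrappers mirror the Fleet methods one-to-one. A short sketch of the convenience API under the same assumptions as above ("my-env", the run ID, and the 30-second interval are illustrative, not values taken from the package):

import fleet.env as env

instance = env.make("my-env", run_id="nightly-eval", heartbeat_interval=30)

env.heartbeat(instance.instance_id)             # keep the instance alive between steps
active_runs = env.list_runs(status="active")    # runs grouped by run_id
mine = env.list_instances(profile_id="self")    # new profile_id filter
env.close_all(run_id="nightly-eval")            # batch delete by run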