fleet-python 0.2.69b3__py3-none-any.whl → 0.2.70__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of fleet-python might be problematic.

fleet/base.py CHANGED
@@ -2,6 +2,7 @@ import httpx
 from typing import Dict, Any, Optional
 import json
 import logging
+import uuid
 
 from .models import InstanceResponse
 from .config import GLOBAL_BASE_URL
@@ -20,6 +21,12 @@ from .exceptions import (
     FleetPermissionError,
 )
 
+# Import version
+try:
+    from . import __version__
+except ImportError:
+    __version__ = "0.2.70"
+
 logger = logging.getLogger(__name__)
 
 
@@ -38,17 +45,17 @@ class BaseWrapper:
             base_url = GLOBAL_BASE_URL
         self.base_url = base_url
 
-    def get_headers(self) -> Dict[str, str]:
+    def get_headers(self, request_id: Optional[str] = None) -> Dict[str, str]:
         headers: Dict[str, str] = {
             "X-Fleet-SDK-Language": "Python",
-            "X-Fleet-SDK-Version": "1.0.0",
+            "X-Fleet-SDK-Version": __version__,
         }
         headers["Authorization"] = f"Bearer {self.api_key}"
-        # Debug log
-        import logging
-
-        logger = logging.getLogger(__name__)
-        logger.debug(f"Headers being sent: {headers}")
+
+        # Add request ID for idempotency (persists across retries)
+        if request_id:
+            headers["X-Request-ID"] = request_id
+
         return headers
 
 
@@ -67,11 +74,14 @@ class SyncWrapper(BaseWrapper):
         **kwargs,
     ) -> httpx.Response:
         base_url = base_url or self.base_url
+        # Generate unique request ID that persists across retries
+        request_id = str(uuid.uuid4())
+
         try:
             response = self.httpx_client.request(
                 method,
                 f"{base_url}{url}",
-                headers=self.get_headers(),
+                headers=self.get_headers(request_id=request_id),
                 params=params,
                 json=json,
                 **kwargs,
@@ -93,8 +103,9 @@ class SyncWrapper(BaseWrapper):
 
             # Debug log 500 errors
             if status_code == 500:
-                logger.error(f"Got 500 error from {response.url}")
-                logger.error(f"Response text: {response.text}")
+                # logger.error(f"Got 500 error from {response.url}")
+                # logger.error(f"Response text: {response.text}")
+                pass
 
             # Try to parse error response as JSON
             try:
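
The net effect of the base.py changes: every SyncWrapper.request() call now mints one UUID and reuses it as an X-Request-ID header, so retries of the same logical request carry the same idempotency key, and the SDK version header now tracks the package version instead of a hard-coded "1.0.0". A minimal sketch of the idempotency pattern outside fleet-python (the post_with_retries helper and its retry loop are hypothetical, not the package's actual retry code):

    import uuid
    import httpx

    def post_with_retries(client: httpx.Client, url: str, body: dict, attempts: int = 3) -> httpx.Response:
        # One ID per logical request, reused on every retry, so the server can
        # deduplicate work if an earlier attempt succeeded but the response was lost.
        headers = {"X-Request-ID": str(uuid.uuid4())}
        last_exc = None
        for _ in range(attempts):
            try:
                return client.post(url, json=body, headers=headers)
            except httpx.TransportError as exc:
                last_exc = exc
        raise last_exc
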
fleet/client.py CHANGED
@@ -21,8 +21,7 @@ import httpx
 import json
 import logging
 import os
-from typing import List, Optional, Dict, Any, TYPE_CHECKING, Union
-from urllib.parse import urlparse
+from typing import List, Optional, Dict, Any, TYPE_CHECKING
 
 from .base import EnvironmentBase, SyncWrapper
 from .models import (
@@ -36,6 +35,8 @@ from .models import (
     TaskRequest,
     TaskResponse,
     TaskUpdateRequest,
+    Run,
+    HeartbeatResponse,
 )
 from .tasks import Task
 
@@ -48,11 +49,6 @@ from .instance import (
     ResetResponse,
     ExecuteFunctionResponse,
 )
-from .instance.models import (
-    Resource as ResourceModel,
-    ResourceType,
-    ResourceMode,
-)
 from .config import (
     DEFAULT_MAX_RETRIES,
     DEFAULT_TIMEOUT,
@@ -75,14 +71,6 @@ class SyncEnv(EnvironmentBase):
         self._client = client
         self._apps: Dict[str, InstanceClient] = {}
         self._instance: Optional[InstanceClient] = None
-        self._manager_url_override: Optional[str] = None  # For URL mode
-
-    @property
-    def manager_url(self) -> str:
-        """Override to support URL mode where urls is None."""
-        if self._manager_url_override is not None:
-            return self._manager_url_override
-        return super().manager_url
 
     @property
     def instance(self) -> InstanceClient:
@@ -94,17 +82,17 @@ class SyncEnv(EnvironmentBase):
 
     def app(self, name: str) -> InstanceClient:
         if name not in self._apps:
-            # Extract scheme://netloc from manager_url, then construct /{name}/api/v1/env
-            # Supports all URL formats:
-            #   https://host/api/v1/env -> https://host/{name}/api/v1/env
-            #   https://host/sentry/api/v1/env -> https://host/{name}/api/v1/env
-            #   http://localhost:8080/api/v1/env -> http://localhost:8080/{name}/api/v1/env
-            parsed = urlparse(self.manager_url)
-            root = f"{parsed.scheme}://{parsed.netloc}"
-            new_url = f"{root}/{name}/api/v1/env"
+            # Extract base URL by removing the current app path (e.g., /sentry/api/v1/env)
+            # manager_url looks like: https://xxx.fleetai.com/sentry/api/v1/env
+            base_url = self.manager_url.split("/api/v1/env")[0]
+            # Remove the current app name (e.g., /sentry) to get the root
+            if "/" in base_url:
+                parts = base_url.rsplit("/", 1)
+                if len(parts) == 2 and parts[0] != "https:/":
+                    base_url = parts[0]
 
             self._apps[name] = InstanceClient(
-                new_url,
+                f"{base_url}/{name}/api/v1/env",
                 self._client.httpx_client if self._client else None,
             )
         return self._apps[name]
@@ -140,6 +128,23 @@ class SyncEnv(EnvironmentBase):
     def close(self) -> InstanceResponse:
         return _delete_instance(self._load_client, self.instance_id)
 
+    def heartbeat(self) -> HeartbeatResponse:
+        """Send heartbeat to keep instance alive (if heartbeat monitoring is enabled).
+
+        Returns:
+            HeartbeatResponse containing heartbeat status and deadline information
+        """
+        body = {}
+        if self.heartbeat_region:
+            body["region"] = self.heartbeat_region
+
+        response = self._load_client.request(
+            "POST",
+            f"/v1/env/instances/{self.instance_id}/heartbeat",
+            json=body
+        )
+        return HeartbeatResponse(**response.json())
+
     def verify(self, validator: ValidatorType) -> ExecuteFunctionResponse:
         return self.instance.verify(validator)
 
@@ -227,6 +232,7 @@ class Fleet:
         image_type: Optional[str] = None,
         ttl_seconds: Optional[int] = None,
         run_id: Optional[str] = None,
+        heartbeat_interval: Optional[int] = None,
     ) -> SyncEnv:
         if ":" in env_key:
             env_key_part, env_version = env_key.split(":", 1)
@@ -263,6 +269,7 @@ class Fleet:
             created_from="sdk",
             ttl_seconds=ttl_seconds,
             run_id=run_id,
+            heartbeat_interval=heartbeat_interval,
         )
 
         # Only use region-specific base URL if no custom base URL is set
@@ -285,7 +292,7 @@ class Fleet:
         return self.make(env_key=f"{task.env_id}:{task.version}")
 
     def instances(
-        self, status: Optional[str] = None, region: Optional[str] = None, run_id: Optional[str] = None
+        self, status: Optional[str] = None, region: Optional[str] = None, run_id: Optional[str] = None, profile_id: Optional[str] = None
    ) -> List[SyncEnv]:
         params = {}
         if status:
@@ -294,6 +301,8 @@ class Fleet:
             params["region"] = region
         if run_id:
             params["run_id"] = run_id
+        if profile_id:
+            params["profile_id"] = profile_id
 
         response = self.client.request("GET", "/v1/env/instances", params=params)
         return [
@@ -301,165 +310,11 @@ class Fleet:
             for instance_data in response.json()
         ]
 
-    def instance(self, instance_id: Union[str, Dict[str, str]]) -> SyncEnv:
-        """Create or connect to an environment instance.
-
-        Supports three modes based on input type:
-        1. dict: Local filesystem mode - {"current": "./data.db", "seed": "./seed.db"}
-        2. str starting with http:// or https://: Localhost/URL mode
-        3. str (other): Remote cloud instance mode
-
-        Args:
-            instance_id: Instance identifier (str), URL (str starting with http://),
-                or local db mapping (dict)
-
-        Returns:
-            SyncEnv: Environment instance
-        """
-        # Local filesystem mode - dict of resource names to file paths
-        if isinstance(instance_id, dict):
-            return self._create_local_instance(instance_id)
-
-        # Localhost/direct URL mode - string starting with http:// or https://
-        elif isinstance(instance_id, str) and instance_id.startswith(("http://", "https://")):
-            return self._create_url_instance(instance_id)
-
-        # Remote mode - existing behavior
-        else:
-            response = self.client.request("GET", f"/v1/env/instances/{instance_id}")
-            instance = SyncEnv(client=self.client, **response.json())
-            instance.instance.load()
-            return instance
-
-    def _create_url_instance(self, base_url: str) -> SyncEnv:
-        """Create instance connected to a direct URL (localhost or custom).
-
-        Args:
-            base_url: URL of the instance manager API
-
-        Returns:
-            SyncEnv: Environment instance configured for URL mode
-        """
-        instance_client = InstanceClient(url=base_url, httpx_client=self._httpx_client)
-
-        # Create a minimal environment for URL mode
-        env = SyncEnv(
-            client=self.client,
-            instance_id=base_url,
-            env_key="localhost",
-            version="",
-            status="running",
-            subdomain="localhost",
-            created_at="",
-            updated_at="",
-            terminated_at=None,
-            team_id="",
-            region="localhost",
-            env_variables=None,
-            data_key=None,
-            data_version=None,
-            urls=None,
-            health=None,
-        )
-        env._instance = instance_client
-        env._manager_url_override = base_url  # Set manager_url for URL mode
-        return env
-
-    @staticmethod
-    def _normalize_db_path(path: str) -> tuple[str, bool]:
-        """Normalize database path and detect if it's in-memory.
-
-        Args:
-            path: Database path - can be:
-                - File path: "./data.db"
-                - Plain memory: ":memory:"
-                - Named memory: ":memory:namespace"
-                - URI: "file:name?mode=memory&cache=shared"
-
-        Returns:
-            Tuple of (normalized_path, is_memory)
-        """
-        import uuid
-        import sqlite3
-
-        if path == ":memory:":
-            # Plain :memory: - create unique namespace
-            name = f"mem_{uuid.uuid4().hex[:8]}"
-            return f"file:{name}?mode=memory&cache=shared", True
-        elif path.startswith(":memory:"):
-            # Named memory: :memory:current -> file:current?mode=memory&cache=shared
-            namespace = path[8:]  # Remove ":memory:" prefix
-            return f"file:{namespace}?mode=memory&cache=shared", True
-        elif "mode=memory" in path:
-            # Already a proper memory URI
-            return path, True
-        else:
-            # Regular file path
-            return path, False
-
-    def _create_local_instance(self, dbs: Dict[str, str]) -> SyncEnv:
-        """Create instance with local file-based or in-memory SQLite resources.
-
-        Args:
-            dbs: Map of resource names to paths (e.g., {"current": "./data.db"} or
-                {"current": ":memory:current"})
-
-        Returns:
-            SyncEnv: Environment instance configured for local mode
-        """
-        import sqlite3
-
-        instance_client = InstanceClient(url="local://", httpx_client=None)
-        instance_client._resources = []  # Mark as loaded
-        instance_client._memory_anchors = {}  # Store anchor connections for in-memory DBs
-
-        # Store creation parameters for local SQLiteResources
-        # This allows db() to create new instances each time (matching HTTP mode behavior)
-        for name, path in dbs.items():
-            # Normalize path and detect if it's in-memory
-            normalized_path, is_memory = self._normalize_db_path(path)
-
-            # Create anchor connection for in-memory databases
-            # This keeps the database alive as long as the env exists
-            if is_memory:
-                anchor_conn = sqlite3.connect(normalized_path, uri=True)
-                instance_client._memory_anchors[name] = anchor_conn
-
-            resource_model = ResourceModel(
-                name=name,
-                type=ResourceType.db,
-                mode=ResourceMode.rw,
-                label=f"Local: {path}",
-            )
-            instance_client._resources_state[ResourceType.db.value][name] = {
-                'type': 'local',
-                'resource_model': resource_model,
-                'db_path': normalized_path,
-                'is_memory': is_memory
-            }
-
-        # Create a minimal environment for local mode
-        env = SyncEnv(
-            client=self.client,
-            instance_id="local",
-            env_key="local",
-            version="",
-            status="running",
-            subdomain="local",
-            created_at="",
-            updated_at="",
-            terminated_at=None,
-            team_id="",
-            region="local",
-            env_variables=None,
-            data_key=None,
-            data_version=None,
-            urls=None,
-            health=None,
-        )
-        env._instance = instance_client
-        env._manager_url_override = "local://"  # Set manager_url for local mode
-        return env
+    def instance(self, instance_id: str) -> SyncEnv:
+        response = self.client.request("GET", f"/v1/env/instances/{instance_id}")
+        instance = SyncEnv(client=self.client, **response.json())
+        instance.instance.load()
+        return instance
 
     def check_bundle_exists(self, bundle_hash: str) -> VerifiersCheckResponse:
         return _check_bundle_exists(self.client, bundle_hash)
@@ -483,16 +338,53 @@ class Fleet:
         """
         return _delete_instance(self.client, instance_id)
 
-    def close_all(self, run_id: str) -> List[InstanceResponse]:
-        """Close (delete) all instances associated with a run_id.
+    def heartbeat(self, instance_id: str, region: Optional[str] = None) -> HeartbeatResponse:
+        """Send heartbeat to keep instance alive (if heartbeat monitoring is enabled).
+
+        Args:
+            instance_id: The instance ID to send heartbeat for
+            region: Optional region override for cross-region heartbeats
+
+        Returns:
+            HeartbeatResponse containing heartbeat status and deadline information
+        """
+        return _send_heartbeat(self.client, instance_id, region)
+
+    def close_all(self, run_id: Optional[str] = None, profile_id: Optional[str] = None) -> List[InstanceResponse]:
+        """Close (delete) instances using the batch delete endpoint.
 
         Args:
-            run_id: The run ID whose instances should be closed
+            run_id: Optional run ID to filter instances by
+            profile_id: Optional profile ID to filter instances by (use "self" for your own profile)
 
         Returns:
             List[InstanceResponse] containing the deleted instances
+
+        Note:
+            At least one of run_id or profile_id must be provided.
+        """
+        return _delete_instances_batch(self.client, run_id=run_id, profile_id=profile_id)
+
+    def list_runs(
+        self, profile_id: Optional[str] = None, status: Optional[str] = "active"
+    ) -> List[Run]:
+        """List all runs (groups of instances by run_id) with aggregated statistics.
+
+        Args:
+            profile_id: Optional profile ID to filter runs by (use "self" for your own profile)
+            status: Filter by run status - "active" (default), "inactive", or "all"
+
+        Returns:
+            List[Run] containing run information with instance counts and timestamps
         """
-        return _delete_instances_by_run_id(self.client, run_id)
+        params = {}
+        if profile_id:
+            params["profile_id"] = profile_id
+        if status:
+            params["active"] = status
+
+        response = self.client.request("GET", "/v1/env/runs", params=params)
+        return [Run(**run_data) for run_data in response.json()]
 
     def load_tasks_from_file(self, filename: str) -> List[Task]:
         with open(filename, "r", encoding="utf-8") as f:
@@ -572,8 +464,8 @@ class Fleet:
                 error_msg = f"Failed to create verifier {task_json.get('key', task_json.get('id'))}: {e}"
                 if raise_on_verifier_error:
                     raise ValueError(error_msg) from e
-                else:
-                    logger.warning(error_msg)
+                # else:
+                #     logger.warning(error_msg)
 
             task = Task(
                 key=task_json.get("key", task_json.get("id")),
@@ -663,23 +555,23 @@ class Fleet:
                             verifier_sha=tr.verifier.sha256,
                         )
                     except Exception as e:
-                        logger.warning(
-                            f"Failed to create verifier {tr.verifier.key}: {e}"
-                        )
+                        # logger.warning(
+                        #     f"Failed to create verifier {tr.verifier.key}: {e}"
+                        # )
                         return None
                 else:
                     # Fallback: try fetching by ID
                     try:
-                        logger.warning(
-                            f"Embedded verifier code missing for {tr.verifier.key} (NoSuchKey). "
-                            f"Attempting to refetch by id {tr.verifier.verifier_id}"
-                        )
+                        # logger.warning(
+                        #     f"Embedded verifier code missing for {tr.verifier.key} (NoSuchKey). "
+                        #     f"Attempting to refetch by id {tr.verifier.verifier_id}"
+                        # )
                         return self._load_verifier(tr.verifier.verifier_id)
                     except Exception as e:
-                        logger.warning(
-                            f"Refetch by verifier id failed for {tr.verifier.key}: {e}. "
-                            "Leaving verifier unset."
-                        )
+                        # logger.warning(
+                        #     f"Refetch by verifier id failed for {tr.verifier.key}: {e}. "
+                        #     "Leaving verifier unset."
+                        # )
                         return None
 
             # Add the task for parallel execution
@@ -719,7 +611,7 @@ class Fleet:
                 result = future.result()
                 verifier_results.append(result)
             except Exception as e:
-                logger.warning(f"Verifier loading failed: {e}")
+                # logger.warning(f"Verifier loading failed: {e}")
                 verifier_results.append(None)
 
         # Build tasks with results
@@ -806,10 +698,10 @@ class Fleet:
            with open(filename, "w", encoding="utf-8") as f:
                json.dump(tasks_data, f, indent=2, default=str)
 
-            logger.info(f"Exported {len(tasks)} tasks to {filename}")
+            # logger.info(f"Exported {len(tasks)} tasks to {filename}")
            return filename
        else:
-            logger.info("No tasks found to export")
+            # logger.info("No tasks found to export")
            return None
 
     def import_single_task(self, task: Task, project_key: Optional[str] = None):
@@ -838,7 +730,7 @@ class Fleet:
            )
            return response
        except Exception as e:
-            logger.error(f"Failed to import task {task.key}: {e}")
+            # logger.error(f"Failed to import task {task.key}: {e}")
            return None
 
     def import_tasks(self, filename: str, project_key: Optional[str] = None):
@@ -1004,8 +896,34 @@ def _delete_instance(client: SyncWrapper, instance_id: str) -> InstanceResponse:
     return InstanceResponse(**response.json())
 
 
-def _delete_instances_by_run_id(client: SyncWrapper, run_id: str) -> List[InstanceResponse]:
-    response = client.request("DELETE", f"/v1/env/instances/run/{run_id}")
+def _send_heartbeat(client: SyncWrapper, instance_id: str, region: Optional[str] = None) -> HeartbeatResponse:
+    """Send heartbeat to keep instance alive."""
+    body = {}
+    if region:
+        body["region"] = region
+
+    response = client.request(
+        "POST",
+        f"/v1/env/instances/{instance_id}/heartbeat",
+        json=body
+    )
+    return HeartbeatResponse(**response.json())
+
+
+def _delete_instances_batch(
+    client: SyncWrapper, run_id: Optional[str] = None, profile_id: Optional[str] = None
+) -> List[InstanceResponse]:
+    """Delete instances using the batch endpoint with flexible filtering."""
+    params = {}
+    if run_id:
+        params["run_id"] = run_id
+    if profile_id:
+        params["profile_id"] = profile_id
+
+    if not params:
+        raise ValueError("At least one of run_id or profile_id must be provided")
+
+    response = client.request("DELETE", "/v1/env/instances/batch", params=params)
     return [InstanceResponse(**instance_data) for instance_data in response.json()]
 
 
@@ -1051,17 +969,17 @@ def _execute_verifier_remote(
        request_data["bundle"] = bundle_b64
 
    # Debug logging
-    logger.debug(
-        f"Sending verifier execute request: key={key}, sha256={bundle_sha[:8]}..., function_name={function_name}"
-    )
-    logger.debug(f"Request has bundle: {needs_upload}")
-    logger.debug(f"Using client with base_url: {client.base_url}")
-    logger.debug(f"Request data keys: {list(request_data.keys())}")
-    logger.debug(
-        f"Bundle size: {len(request_data.get('bundle', ''))} chars"
-        if "bundle" in request_data
-        else "No bundle"
-    )
+    # logger.debug(
+    #     f"Sending verifier execute request: key={key}, sha256={bundle_sha[:8]}..., function_name={function_name}"
+    # )
+    # logger.debug(f"Request has bundle: {needs_upload}")
+    # logger.debug(f"Using client with base_url: {client.base_url}")
+    # logger.debug(f"Request data keys: {list(request_data.keys())}")
+    # logger.debug(
+    #     f"Bundle size: {len(request_data.get('bundle', ''))} chars"
+    #     if "bundle" in request_data
+    #     else "No bundle"
+    # )
 
    # Note: This should be called on the instance URL, not the orchestrator
    # The instance has manager URLs for verifier execution
@@ -1069,6 +987,6 @@ def _execute_verifier_remote(
 
    # Debug the response
    response_json = response.json()
-    logger.debug(f"Verifier execute response: {response_json}")
+    # logger.debug(f"Verifier execute response: {response_json}")
 
    return VerifiersExecuteResponse(**response_json)
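
Taken together, client.py gains instance heartbeats, profile-aware filtering, batch close via DELETE /v1/env/instances/batch, and run listing via GET /v1/env/runs, while the local/URL instance modes and most logger calls are removed. A rough usage sketch based on the signatures above (the env key and run ID are placeholders, and the Fleet() construction picking up an API key from the environment is an assumption):

    import fleet

    client = fleet.Fleet()  # assumption: API key resolved from the environment

    # Ask the backend to watch for heartbeats, then keep the instance alive.
    env = client.make("your-env-key:1", run_id="run-123", heartbeat_interval=60)
    env.heartbeat()                     # per-instance helper added in this release
    client.heartbeat(env.instance_id)   # equivalent client-level call

    # Inspect and clean up by run or profile.
    runs = client.list_runs(profile_id="self", status="active")
    client.close_all(run_id="run-123")  # at least one of run_id / profile_id is required
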
fleet/config.py CHANGED
@@ -1,5 +1,5 @@
 DEFAULT_MAX_RETRIES = 3
-DEFAULT_TIMEOUT = 180.0
+DEFAULT_TIMEOUT = 300.0
 
 GLOBAL_BASE_URL = "https://orchestrator.fleetai.com"
 REGION_BASE_URL = {
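
The default request timeout rises from 180 to 300 seconds. client.py imports this constant alongside DEFAULT_MAX_RETRIES, presumably to configure its HTTP client; a rough illustration of what such a default means for an httpx client (illustrative wiring, not the package's exact code):

    import httpx

    DEFAULT_TIMEOUT = 300.0  # seconds; was 180.0 in 0.2.69b3

    # A single float applies to connect, read, write, and pool timeouts alike.
    client = httpx.Client(timeout=httpx.Timeout(DEFAULT_TIMEOUT))
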
fleet/env/__init__.py CHANGED
@@ -9,6 +9,8 @@ from .client import (
     list_instances,
     close,
     close_all,
+    list_runs,
+    heartbeat,
     account,
 )
 
@@ -21,6 +23,8 @@ from .._async.env.client import (
     list_instances_async,
     close_async,
     close_all_async,
+    list_runs_async,
+    heartbeat_async,
     account_async,
 )
 
@@ -33,6 +37,8 @@ __all__ = [
     "get",
     "close",
     "close_all",
+    "list_runs",
+    "heartbeat",
     "make_async",
     "list_envs_async",
     "list_regions_async",
@@ -40,6 +46,8 @@ __all__ = [
     "get_async",
     "close_async",
     "close_all_async",
+    "list_runs_async",
+    "heartbeat_async",
     "account",
     "account_async",
 ]
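
The same run and heartbeat helpers are also re-exported from fleet.env in both sync and async form. Their exact signatures are not shown in this diff; assuming they mirror the Fleet methods above, usage would look roughly like:

    from fleet import env

    # Assumption: module-level helpers take the same arguments as the
    # corresponding Fleet methods in fleet/client.py.
    env.heartbeat("<instance-id>")
    active_runs = env.list_runs(status="active")
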