fleet-python 0.2.69b2__py3-none-any.whl → 0.2.70__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of fleet-python might be problematic.

@@ -1,5 +1,5 @@
 from ..client import AsyncFleet, AsyncEnv, Task
-from ...models import Environment as EnvironmentModel, AccountResponse, InstanceResponse
+from ...models import Environment as EnvironmentModel, AccountResponse, InstanceResponse, Run, HeartbeatResponse
 from typing import List, Optional, Dict, Any
 
 
@@ -11,6 +11,7 @@ async def make_async(
     image_type: Optional[str] = None,
     ttl_seconds: Optional[int] = None,
     run_id: Optional[str] = None,
+    heartbeat_interval: Optional[int] = None,
 ) -> AsyncEnv:
     return await AsyncFleet().make(
         env_key,
@@ -20,6 +21,7 @@ async def make_async(
         image_type=image_type,
         ttl_seconds=ttl_seconds,
         run_id=run_id,
+        heartbeat_interval=heartbeat_interval,
     )
 
 
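
For orientation, a minimal usage sketch of the new keepalive parameter (the top-level re-export of make_async/close_async and the env key are assumptions, not taken from this diff):

import asyncio

from fleet import make_async, close_async  # assumed re-export path

async def main():
    # "my-env" is a placeholder env key; heartbeat_interval is the new
    # optional keepalive interval in seconds
    env = await make_async("my-env", run_id="batch-1", heartbeat_interval=60)
    try:
        ...  # interact with the environment
    finally:
        await close_async(env.instance_id)  # instance_id attribute assumed

asyncio.run(main())
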
@@ -36,9 +38,9 @@ async def list_regions_async() -> List[str]:
 
 
 async def list_instances_async(
-    status: Optional[str] = None, region: Optional[str] = None, run_id: Optional[str] = None
+    status: Optional[str] = None, region: Optional[str] = None, run_id: Optional[str] = None, profile_id: Optional[str] = None
 ) -> List[AsyncEnv]:
-    return await AsyncFleet().instances(status=status, region=region, run_id=run_id)
+    return await AsyncFleet().instances(status=status, region=region, run_id=run_id, profile_id=profile_id)
 
 
 async def get_async(instance_id: str) -> AsyncEnv:
@@ -57,16 +59,45 @@ async def close_async(instance_id: str) -> InstanceResponse:
     return await AsyncFleet().close(instance_id)
 
 
-async def close_all_async(run_id: str) -> List[InstanceResponse]:
-    """Close (delete) all instances associated with a run_id.
+async def close_all_async(run_id: Optional[str] = None, profile_id: Optional[str] = None) -> List[InstanceResponse]:
+    """Close (delete) instances using the batch delete endpoint.
 
     Args:
-        run_id: The run ID whose instances should be closed
+        run_id: Optional run ID to filter instances by
+        profile_id: Optional profile ID to filter instances by (use "self" for your own profile)
 
     Returns:
         List[InstanceResponse] containing the deleted instances
+
+    Note:
+        At least one of run_id or profile_id must be provided.
+    """
+    return await AsyncFleet().close_all(run_id=run_id, profile_id=profile_id)
+
+
+async def list_runs_async(profile_id: Optional[str] = None, status: Optional[str] = "active") -> List[Run]:
+    """List all runs (groups of instances by run_id) with aggregated statistics.
+
+    Args:
+        profile_id: Optional profile ID to filter runs by (use "self" for your own profile)
+        status: Filter by run status - "active" (default), "inactive", or "all"
+
+    Returns:
+        List[Run] containing run information with instance counts and timestamps
+    """
+    return await AsyncFleet().list_runs(profile_id=profile_id, status=status)
+
+
+async def heartbeat_async(instance_id: str) -> HeartbeatResponse:
+    """Send heartbeat to keep instance alive (if heartbeat monitoring is enabled).
+
+    Args:
+        instance_id: The instance ID to send heartbeat for
+
+    Returns:
+        HeartbeatResponse containing heartbeat status and deadline information
     """
-    return await AsyncFleet().close_all(run_id)
+    return await AsyncFleet().heartbeat(instance_id)
 
 
 async def account_async() -> AccountResponse:
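
A hedged sketch of the three new module-level helpers above (import path assumed; the IDs are placeholders):

from fleet import list_runs_async, close_all_async, heartbeat_async

async def manage_runs():
    # List active runs for your own profile, with aggregated statistics
    runs = await list_runs_async(profile_id="self", status="active")

    # Keep a specific instance alive while long-running work is in flight
    hb = await heartbeat_async("inst-abc")

    # Batch-close everything in a finished run; per the docstring, at least
    # one of run_id / profile_id must be provided
    deleted = await close_all_async(run_id="batch-1")
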
@@ -85,17 +85,9 @@ class AsyncInstanceClient:
         Returns:
             An SQLite database resource for the given database name
         """
-        resource_info = self._resources_state[ResourceType.db.value][name]
-        # Local mode - resource_info is a dict with creation parameters
-        if isinstance(resource_info, dict) and resource_info.get('type') == 'local':
-            # Create new instance each time (matching HTTP mode behavior)
-            return AsyncSQLiteResource(
-                resource_info['resource_model'],
-                client=None,
-                db_path=resource_info['db_path']
-            )
-        # HTTP mode - resource_info is a ResourceModel, create new wrapper
-        return AsyncSQLiteResource(resource_info, self.client)
+        return AsyncSQLiteResource(
+            self._resources_state[ResourceType.db.value][name], self.client
+        )
 
     def browser(self, name: str) -> AsyncBrowserResource:
         return AsyncBrowserResource(
@@ -185,17 +177,10 @@ class AsyncInstanceClient:
         response = await self.client.request("GET", "/health")
         return HealthResponse(**response.json())
 
-    def close(self):
-        """Close anchor connections for in-memory databases."""
-        if hasattr(self, '_memory_anchors'):
-            for conn in self._memory_anchors.values():
-                conn.close()
-            self._memory_anchors.clear()
-
     async def __aenter__(self):
         """Async context manager entry."""
         return self
 
     async def __aexit__(self, exc_type, exc_val, exc_tb):
         """Async context manager exit."""
-        self.close()
+        await self.close()
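
Since __aexit__ now awaits self.close(), close() is presumably redefined as a coroutine elsewhere in this class (not shown in this diff). Context-manager usage is otherwise unchanged, e.g.:

async with client:  # client: an AsyncInstanceClient
    ...             # use resources; close() is awaited on exit
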
@@ -6,7 +6,6 @@ from datetime import datetime
 import tempfile
 import sqlite3
 import os
-import asyncio
 
 from typing import TYPE_CHECKING
 
@@ -680,100 +679,17 @@ class AsyncQueryBuilder:
 
 
 class AsyncSQLiteResource(Resource):
-    def __init__(
-        self,
-        resource: ResourceModel,
-        client: Optional["AsyncWrapper"] = None,
-        db_path: Optional[str] = None,
-    ):
+    def __init__(self, resource: ResourceModel, client: "AsyncWrapper"):
         super().__init__(resource)
         self.client = client
-        self.db_path = db_path
-        self._mode = "direct" if db_path else "http"
-
-    @property
-    def mode(self) -> str:
-        """Return the mode of this resource: 'direct' (local file) or 'http' (remote API)."""
-        return self._mode
 
     async def describe(self) -> DescribeResponse:
         """Describe the SQLite database schema."""
-        if self._mode == "direct":
-            return await self._describe_direct()
-        else:
-            return await self._describe_http()
-
-    async def _describe_http(self) -> DescribeResponse:
-        """Describe database schema via HTTP API."""
         response = await self.client.request(
             "GET", f"/resources/sqlite/{self.resource.name}/describe"
         )
         return DescribeResponse(**response.json())
 
-    async def _describe_direct(self) -> DescribeResponse:
-        """Describe database schema from local file or in-memory database."""
-        def _sync_describe():
-            try:
-                # Check if we need URI mode (for shared memory databases)
-                use_uri = 'mode=memory' in self.db_path
-                conn = sqlite3.connect(self.db_path, uri=use_uri)
-                cursor = conn.cursor()
-
-                # Get all tables
-                cursor.execute(
-                    "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
-                )
-                table_names = [row[0] for row in cursor.fetchall()]
-
-                tables = []
-                for table_name in table_names:
-                    # Get table info
-                    cursor.execute(f"PRAGMA table_info({table_name})")
-                    columns = cursor.fetchall()
-
-                    # Get CREATE TABLE SQL
-                    cursor.execute(
-                        f"SELECT sql FROM sqlite_master WHERE type='table' AND name=?",
-                        (table_name,)
-                    )
-                    sql_row = cursor.fetchone()
-                    create_sql = sql_row[0] if sql_row else ""
-
-                    table_schema = {
-                        "name": table_name,
-                        "sql": create_sql,
-                        "columns": [
-                            {
-                                "name": col[1],
-                                "type": col[2],
-                                "notnull": bool(col[3]),
-                                "default_value": col[4],
-                                "primary_key": col[5] > 0,
-                            }
-                            for col in columns
-                        ],
-                    }
-                    tables.append(table_schema)
-
-                conn.close()
-
-                return DescribeResponse(
-                    success=True,
-                    resource_name=self.resource.name,
-                    tables=tables,
-                    message="Schema retrieved from local file",
-                )
-            except Exception as e:
-                return DescribeResponse(
-                    success=False,
-                    resource_name=self.resource.name,
-                    tables=None,
-                    error=str(e),
-                    message=f"Failed to describe database: {str(e)}",
-                )
-
-        return await asyncio.to_thread(_sync_describe)
-
     async def query(
         self, query: str, args: Optional[List[Any]] = None
     ) -> QueryResponse:
@@ -785,15 +701,6 @@ class AsyncSQLiteResource(Resource):
     async def _query(
         self, query: str, args: Optional[List[Any]] = None, read_only: bool = True
     ) -> QueryResponse:
-        if self._mode == "direct":
-            return await self._query_direct(query, args, read_only)
-        else:
-            return await self._query_http(query, args, read_only)
-
-    async def _query_http(
-        self, query: str, args: Optional[List[Any]] = None, read_only: bool = True
-    ) -> QueryResponse:
-        """Execute query via HTTP API."""
         request = QueryRequest(query=query, args=args, read_only=read_only)
         response = await self.client.request(
             "POST",
@@ -802,62 +709,6 @@ class AsyncSQLiteResource(Resource):
         )
         return QueryResponse(**response.json())
 
-    async def _query_direct(
-        self, query: str, args: Optional[List[Any]] = None, read_only: bool = True
-    ) -> QueryResponse:
-        """Execute query directly on local SQLite file or in-memory database."""
-        def _sync_query():
-            try:
-                # Check if we need URI mode (for shared memory databases)
-                use_uri = 'mode=memory' in self.db_path
-                conn = sqlite3.connect(self.db_path, uri=use_uri)
-                cursor = conn.cursor()
-
-                # Execute the query
-                if args:
-                    cursor.execute(query, args)
-                else:
-                    cursor.execute(query)
-
-                # For write operations, commit the transaction
-                if not read_only:
-                    conn.commit()
-
-                # Get column names if available
-                columns = [desc[0] for desc in cursor.description] if cursor.description else []
-
-                # Fetch results for SELECT queries
-                rows = []
-                rows_affected = 0
-                last_insert_id = None
-
-                if cursor.description:  # SELECT query
-                    rows = cursor.fetchall()
-                else:  # INSERT/UPDATE/DELETE
-                    rows_affected = cursor.rowcount
-                    last_insert_id = cursor.lastrowid if cursor.lastrowid else None
-
-                conn.close()
-
-                return QueryResponse(
-                    success=True,
-                    columns=columns if columns else None,
-                    rows=rows if rows else None,
-                    rows_affected=rows_affected if rows_affected > 0 else None,
-                    last_insert_id=last_insert_id,
-                    message="Query executed successfully",
-                )
-            except Exception as e:
-                return QueryResponse(
-                    success=False,
-                    columns=None,
-                    rows=None,
-                    error=str(e),
-                    message=f"Query failed: {str(e)}",
-                )
-
-        return await asyncio.to_thread(_sync_query)
-
     def table(self, table_name: str) -> AsyncQueryBuilder:
         """Create a query builder for the specified table."""
         return AsyncQueryBuilder(self, table_name)
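
With the direct/local mode gone, AsyncSQLiteResource always proxies over HTTP. A hedged usage sketch (the db() accessor name is inferred from ResourceType.db and the docstring earlier in this diff; "main" and "users" are placeholders):

db = client.db("main")        # client: an AsyncInstanceClient
schema = await db.describe()  # GET /resources/sqlite/{name}/describe
result = await db.query("SELECT * FROM users WHERE id = ?", args=[1])
builder = db.table("users")   # AsyncQueryBuilder for fluent queries
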
fleet/_async/tasks.py CHANGED
@@ -215,13 +215,18 @@ class Task(BaseModel):
         image_type: Optional[str] = None,
         ttl_seconds: Optional[int] = None,
         run_id: Optional[str] = None,
+        heartbeat_interval: Optional[int] = None,
     ):
         """Create an environment instance for this task's environment.
 
         Alias for make() method. Uses the task's env_id (and version if present) to create the env.
         """
         return await self.make(
-            region=region, image_type=image_type, ttl_seconds=ttl_seconds, run_id=run_id
+            region=region,
+            image_type=image_type,
+            ttl_seconds=ttl_seconds,
+            run_id=run_id,
+            heartbeat_interval=heartbeat_interval,
         )
 
     async def make(
@@ -230,6 +235,7 @@ class Task(BaseModel):
         image_type: Optional[str] = None,
         ttl_seconds: Optional[int] = None,
         run_id: Optional[str] = None,
+        heartbeat_interval: Optional[int] = None,
     ):
         """Create an environment instance with task's configuration.
 
@@ -238,12 +244,14 @@ class Task(BaseModel):
         - data_key (data_id + data_version, if present)
         - env_variables (if present)
         - run_id (if present)
+        - heartbeat_interval (if present)
 
         Args:
             region: Optional AWS region for the environment
             image_type: Optional image type for the environment
             ttl_seconds: Optional TTL in seconds for the instance
             run_id: Optional run ID to group instances
+            heartbeat_interval: Optional heartbeat interval in seconds (30-3600)
 
         Returns:
             Environment instance configured for this task
@@ -251,7 +259,7 @@ class Task(BaseModel):
         Example:
             task = fleet.Task(key="my-task", prompt="...", env_id="my-env",
                               data_id="my-data", data_version="v1.0")
-            env = await task.make(region="us-west-2", run_id="my-batch-123")
+            env = await task.make(region="us-west-2", run_id="my-batch-123", heartbeat_interval=60)
         """
         if not self.env_id:
             raise ValueError("Task has no env_id defined")
@@ -267,6 +275,7 @@ class Task(BaseModel):
             image_type=image_type,
             ttl_seconds=ttl_seconds,
             run_id=run_id,
+            heartbeat_interval=heartbeat_interval,
         )
 
 
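
A hedged sketch pairing the task-level heartbeat_interval with the new heartbeat_async helper (the env.instance_id attribute and the deadline semantics are assumptions):

import asyncio

async def run_with_keepalive(task):
    env = await task.make(run_id="my-batch-123", heartbeat_interval=60)
    async def keepalive():
        while True:
            await heartbeat_async(env.instance_id)
            await asyncio.sleep(30)  # beat well inside the 60s interval
    beat = asyncio.create_task(keepalive())
    try:
        ...  # do work against env
    finally:
        beat.cancel()
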
@@ -295,11 +304,8 @@ def verifier_from_string(
     # Remove lines like: @verifier(key="...")
     cleaned_code = re.sub(r"@verifier\([^)]*\)\s*\n", "", verifier_func)
     # Also remove the verifier import if present
-    # Use MULTILINE flag to match beginning of lines with ^
-    cleaned_code = re.sub(r"^from fleet\.verifiers.*import.*verifier.*$\n?", "", cleaned_code, flags=re.MULTILINE)
-    cleaned_code = re.sub(r"^from fleet import verifier.*$\n?", "", cleaned_code, flags=re.MULTILINE)
-    cleaned_code = re.sub(r"^import fleet\.verifiers.*$\n?", "", cleaned_code, flags=re.MULTILINE)
-    cleaned_code = re.sub(r"^import fleet$\n?", "", cleaned_code, flags=re.MULTILINE)
+    cleaned_code = re.sub(r"from fleet import.*verifier.*\n", "", cleaned_code)
+    cleaned_code = re.sub(r"import.*verifier.*\n", "", cleaned_code)
 
     # Create a local namespace for executing the code
     local_namespace = {
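
The replacement patterns drop the old version's MULTILINE anchors, so they can match anywhere in a line. A quick sanity check of the new behavior with plain re.sub:

import re

src = 'from fleet import verifier\nfrom fleet.verifiers import verifier\nx = 1\n'
src = re.sub(r"from fleet import.*verifier.*\n", "", src)
src = re.sub(r"import.*verifier.*\n", "", src)
print(repr(src))  # 'from fleet.verifiers x = 1\n'

The second, unanchored pattern strips from the mid-line "import" onward, newline included, fusing the leftover "from fleet.verifiers " prefix onto the next line.
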
@@ -37,7 +37,7 @@ class FunctionBundler:
     ) -> bytes:
         """Create a function bundle with statically extracted code."""
 
-        logger.info(f"Creating function bundle for {func.__name__}")
+        # logger.info(f"Creating function bundle for {func.__name__}")
 
         # 1. Parse the main function and find dependencies
         mod_file = Path(func.__code__.co_filename)
@@ -115,7 +115,7 @@ class FunctionBundler:
 
         # Find function calls within the verifier function
         called_functions = self._extract_function_calls(main_func_ast)
-        logger.debug(f"Functions called in verifier: {called_functions}")
+        # logger.debug(f"Functions called in verifier: {called_functions}")
 
         # Find all functions defined in the module
         module_functions = {}
@@ -128,7 +128,7 @@ class FunctionBundler:
         for func_name in called_functions:
             if func_name in module_functions and func_name != func.__name__:
                 same_module_deps.append(func_name)
-                logger.debug(f"Found same-module dependency: {func_name}")
+                # logger.debug(f"Found same-module dependency: {func_name}")
 
         # Separate local and external imports
         local_imports = {}
@@ -292,7 +292,7 @@ class FunctionBundler:
                     code = ast.unparse(node)
                     extracted_code.append(code)
                 except Exception as e:
-                    logger.warning(f"Could not unparse AST node: {e}")
+                    # logger.warning(f"Could not unparse AST node: {e}")
                     # Fallback to original source extraction
                     lines = content.split("\n")
                     start_line = node.lineno - 1
@@ -305,11 +305,11 @@ class FunctionBundler:
                     extracted_code.append(code)
 
             result = "\n\n".join(extracted_code)
-            logger.debug(f"Extracted {len(extracted_code)} items from {file_path}")
+            # logger.debug(f"Extracted {len(extracted_code)} items from {file_path}")
            return result
 
         except Exception as e:
-            logger.warning(f"Failed to extract functions from {file_path}: {e}")
+            # logger.warning(f"Failed to extract functions from {file_path}: {e}")
             # Fallback to including the entire file
             with open(file_path, "r", encoding="utf-8") as f:
                 return f.read()
@@ -464,14 +464,14 @@ class FunctionBundler:
                     version = dist.version  # Get the installed version
                     package_with_version = f"{package_name}=={version}"
                     packages.add(package_with_version)
-                    logger.debug(f"Mapped {mod} -> {package_with_version}")
+                    # logger.debug(f"Mapped {mod} -> {package_with_version}")
                 except imd.PackageNotFoundError:
                     # Skip stdlib or local modules
-                    logger.debug(f"Skipping {mod} (stdlib or local)")
+                    # logger.debug(f"Skipping {mod} (stdlib or local)")
                     continue
 
         package_list = list(packages)
-        logger.debug(f"Final package list: {package_list}")
+        # logger.debug(f"Final package list: {package_list}")
         return package_list
 
     def _merge_requirements(
@@ -511,10 +511,10 @@ class FunctionBundler:
                 if pkg_name not in seen_packages:
                     final_requirements.append(req)
                     seen_packages.add(pkg_name)
-                else:
-                    logger.debug(
-                        f"Skipping auto-detected {req}, using explicit version instead"
-                    )
+                # else:
+                #     logger.debug(
+                #         f"Skipping auto-detected {req}, using explicit version instead"
+                #     )
 
         # Always ensure fleet-python is included
         if "fleet-python" not in seen_packages:
@@ -565,9 +565,9 @@ class FunctionBundler:
                         )
                         if dep_src:
                             same_module_code += f"\n{dep_src}\n"
-                            logger.debug(
-                                f"Extracted same-module dependency: {dep_name}"
-                            )
+                            # logger.debug(
+                            #     f"Extracted same-module dependency: {dep_name}"
+                            # )
 
             # Create verifier.py with the main function
             verifier_file = build_dir / "verifier.py"
@@ -586,7 +586,7 @@ class FunctionBundler:
 {code}
 """
             dest_path.write_text(extracted_content)
-            logger.debug(f"Created extracted file: {relative_path}")
+            # logger.debug(f"Created extracted file: {relative_path}")
 
             # Ensure __init__.py files exist
             self._ensure_init_files(Path(relative_path), build_dir)
@@ -595,7 +595,7 @@ class FunctionBundler:
             return self._create_zip_bundle(build_dir)
 
         except Exception as e:
-            logger.error(f"Failed to build function bundle: {e}")
+            # logger.error(f"Failed to build function bundle: {e}")
             raise RuntimeError(f"Function bundle creation failed: {e}")
 
     def _ensure_init_files(self, rel_path: Path, build_dir: Path):
@@ -607,7 +607,7 @@ class FunctionBundler:
             if not init_file.exists():
                 init_file.parent.mkdir(parents=True, exist_ok=True)
                 init_file.write_text("# Auto-generated __init__.py")
-                logger.debug(f"Created __init__.py: {current}")
+                # logger.debug(f"Created __init__.py: {current}")
             current = current.parent
 
     def _create_zip_bundle(self, build_dir: Path) -> bytes:
@@ -621,7 +621,7 @@ class FunctionBundler:
                     zf.write(file_path, arcname)
 
         bundle_size = len(zip_buffer.getvalue())
-        logger.debug(f"Created function bundle ({bundle_size:,} bytes)")
+        # logger.debug(f"Created function bundle ({bundle_size:,} bytes)")
         return zip_buffer.getvalue()
 
     def _extract_function_source(
@@ -662,7 +662,8 @@ class FunctionBundler:
             return "\n".join(func_lines)
 
         except Exception as e:
-            logger.warning(f"Failed to extract function {function_name}: {e}")
+            # logger.warning(f"Failed to extract function {function_name}: {e}")
+            pass
 
         return None
 
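
Throughout these bundler hunks the release silences diagnostics by commenting logger calls out rather than filtering them by level. For comparison only, a conventional sketch that keeps the calls and leaves filtering to consumers (the logger name is illustrative):

import logging

logger = logging.getLogger("fleet.bundler")

def log_bundle_size(bundle_size: int) -> None:
    # The call stays in the code but emits nothing unless a consumer opts in,
    # e.g. logging.getLogger("fleet.bundler").setLevel(logging.DEBUG)
    logger.debug("Created function bundle (%s bytes)", bundle_size)
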
@@ -79,9 +79,9 @@ class AsyncVerifierFunction:
 
             self._bundle_data = zip_buffer.getvalue()
             self._bundle_sha = _get_bundle_sha(self._bundle_data)
-            logger.debug(
-                f"Created bundle from raw code for {self.key} with SHA: {self._bundle_sha}"
-            )
+            # logger.debug(
+            #     f"Created bundle from raw code for {self.key} with SHA: {self._bundle_sha}"
+            # )
         else:
             # Try to create bundle from function source
             try:
@@ -89,9 +89,9 @@ class AsyncVerifierFunction:
                     self.func, self.extra_requirements, self.verifier_id
                 )
                 self._bundle_sha = _get_bundle_sha(self._bundle_data)
-                logger.debug(
-                    f"Created bundle for {self.key} with SHA: {self._bundle_sha}"
-                )
+                # logger.debug(
+                #     f"Created bundle for {self.key} with SHA: {self._bundle_sha}"
+                # )
             except OSError as e:
                 # Can't create bundle - no source and no raw code
                 raise OSError(f"Cannot create bundle for {self.key}: {e}")
@@ -104,20 +104,21 @@ class AsyncVerifierFunction:
 
         # If bundle_data is empty, we're using server-side bundle
        if not bundle_data:
-            logger.debug(f"Using server-side bundle {bundle_sha[:8]}...")
+            # logger.debug(f"Using server-side bundle {bundle_sha[:8]}...")
            return bundle_sha, False  # No upload needed, server has it
 
        # Always check if bundle exists on server
        try:
            exists = await env.check_bundle_exists(bundle_sha)
            if exists.success:
-                logger.info(f"Bundle {bundle_sha[:8]}... found on server")
+                # logger.info(f"Bundle {bundle_sha[:8]}... found on server")
                return bundle_sha, False  # Found on server, no upload needed
        except Exception as e:
-            logger.warning(f"Failed to check bundle existence: {e}")
+            # logger.warning(f"Failed to check bundle existence: {e}")
+            pass
 
        # Bundle not found on server - upload needed
-        logger.info(f"Bundle {bundle_sha[:8]}... needs to be uploaded")
+        # logger.info(f"Bundle {bundle_sha[:8]}... needs to be uploaded")
        return bundle_sha, True  # Upload needed
 
    async def __call__(self, env: AsyncEnv, *args, **kwargs) -> float:
@@ -147,7 +148,7 @@ class AsyncVerifierFunction:
             )
 
         except Exception as e:
-            logger.error(f"Error in verifier {self.key}: {e}")
+            # logger.error(f"Error in verifier {self.key}: {e}")
             # Return error score 0
             return 0.0
 
@@ -179,7 +180,7 @@ class AsyncVerifierFunction:
         try:
             return float(result)
         except (ValueError, TypeError):
-            logger.warning(f"Could not convert result to float: {result}")
+            # logger.warning(f"Could not convert result to float: {result}")
             return 0.0
 
     def _raise_remote_error(self, error_info: Dict[str, Any]):
@@ -238,7 +239,7 @@ Remote traceback:
 
         if needs_upload:
             # Need to upload bundle to S3
-            logger.info(f"Uploading bundle {bundle_sha[:8]}... for {self.key}")
+            # logger.info(f"Uploading bundle {bundle_sha[:8]}... for {self.key}")
             bundle_data, _ = self._get_or_create_bundle()
 
             response = await env.execute_verifier_remote(
@@ -252,11 +253,11 @@ Remote traceback:
                 needs_upload=True,
             )
 
-            logger.debug(f"Bundle {bundle_sha[:8]}... uploaded successfully")
+            # logger.debug(f"Bundle {bundle_sha[:8]}... uploaded successfully")
 
         else:
             # Bundle already available - execute without upload
-            logger.info(f"Bundle {bundle_sha[:8]}... already cached for {self.key}")
+            # logger.info(f"Bundle {bundle_sha[:8]}... already cached for {self.key}")
             response = await env.execute_verifier_remote(
                 bundle_data=b"",  # Empty bundle since it's cached
                 bundle_sha=bundle_sha,
@@ -273,9 +274,9 @@ Remote traceback:
         except Exception as e:
             # Check if error indicates bundle not found and retry with upload
             if self._is_bundle_not_found_error(e) and not needs_upload:
-                logger.info(
-                    f"Bundle {bundle_sha[:8]}... not found on server, uploading..."
-                )
+                # logger.info(
+                #     f"Bundle {bundle_sha[:8]}... not found on server, uploading..."
+                # )
                 bundle_data, _ = self._get_or_create_bundle()
                 response = await env.execute_verifier_remote(
                     bundle_data=bundle_data,
@@ -289,7 +290,7 @@ Remote traceback:
                 )
                 return response
             else:
-                logger.error(f"Error in remote execution of {self.key}: {e}")
+                # logger.error(f"Error in remote execution of {self.key}: {e}")
                 raise
 
 