fleet-python 0.2.69b2__py3-none-any.whl → 0.2.70__py3-none-any.whl
This diff shows the content of two publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
Potentially problematic release: this version of fleet-python might be problematic.
- fleet/__init__.py +3 -2
- fleet/_async/__init__.py +26 -2
- fleet/_async/base.py +21 -10
- fleet/_async/client.py +131 -201
- fleet/_async/env/client.py +38 -7
- fleet/_async/instance/client.py +4 -19
- fleet/_async/resources/sqlite.py +1 -150
- fleet/_async/tasks.py +13 -7
- fleet/_async/verifiers/bundler.py +22 -21
- fleet/_async/verifiers/verifier.py +20 -19
- fleet/base.py +21 -10
- fleet/client.py +128 -199
- fleet/config.py +1 -1
- fleet/env/__init__.py +8 -0
- fleet/env/client.py +38 -7
- fleet/instance/client.py +5 -20
- fleet/models.py +33 -0
- fleet/resources/sqlite.py +1 -143
- fleet/tasks.py +15 -7
- fleet/verifiers/bundler.py +22 -21
- fleet/verifiers/decorator.py +1 -1
- fleet/verifiers/verifier.py +20 -19
- {fleet_python-0.2.69b2.dist-info → fleet_python-0.2.70.dist-info}/METADATA +1 -1
- {fleet_python-0.2.69b2.dist-info → fleet_python-0.2.70.dist-info}/RECORD +27 -30
- tests/test_instance_dispatch.py +0 -607
- tests/test_sqlite_resource_dual_mode.py +0 -263
- tests/test_sqlite_shared_memory_behavior.py +0 -117
- {fleet_python-0.2.69b2.dist-info → fleet_python-0.2.70.dist-info}/WHEEL +0 -0
- {fleet_python-0.2.69b2.dist-info → fleet_python-0.2.70.dist-info}/licenses/LICENSE +0 -0
- {fleet_python-0.2.69b2.dist-info → fleet_python-0.2.70.dist-info}/top_level.txt +0 -0
fleet/instance/client.py
CHANGED
```diff
@@ -83,17 +83,9 @@ class InstanceClient:
         Returns:
             An SQLite database resource for the given database name
         """
-
-
-
-            # Create new instance each time (matching HTTP mode behavior)
-            return SQLiteResource(
-                resource_info['resource_model'],
-                client=None,
-                db_path=resource_info['db_path']
-            )
-        # HTTP mode - resource_info is a ResourceModel, create new wrapper
-        return SQLiteResource(resource_info, self.client)
+        return SQLiteResource(
+            self._resources_state[ResourceType.db.value][name], self.client
+        )
 
     def browser(self, name: str) -> BrowserResource:
         return BrowserResource(
@@ -183,17 +175,10 @@ class InstanceClient:
         response = self.client.request("GET", "/health")
         return HealthResponse(**response.json())
 
-    def close(self):
-        """Close anchor connections for in-memory databases."""
-        if hasattr(self, '_memory_anchors'):
-            for conn in self._memory_anchors.values():
-                conn.close()
-            self._memory_anchors.clear()
-
     def __enter__(self):
-        """
+        """Async context manager entry."""
         return self
 
     def __exit__(self, exc_type, exc_val, exc_tb):
-        """
+        """Async context manager exit."""
         self.close()
```
fleet/models.py
CHANGED
```diff
@@ -71,6 +71,7 @@ class InstanceRequest(BaseModel):
     image_type: Optional[str] = Field(None, title="Image Type")
     created_from: Optional[str] = Field(None, title="Created From")
     ttl_seconds: Optional[int] = Field(None, title="TTL Seconds")
+    heartbeat_interval: Optional[int] = Field(None, title="Heartbeat Interval")
 
 
 class InstanceStatus(Enum):
@@ -365,6 +366,36 @@ class InstanceResponse(BaseModel):
     urls: Optional[InstanceURLs] = Field(None, title="Urls")
     health: Optional[bool] = Field(None, title="Health")
     run_id: Optional[str] = Field(None, title="Run Id")
+    profile_id: Optional[str] = Field(None, title="Profile Id")
+    heartbeat_interval: Optional[int] = Field(None, title="Heartbeat Interval")
+    heartbeat_region: Optional[str] = Field(None, title="Heartbeat Region")
+
+
+class Run(BaseModel):
+    run_id: str = Field(..., title="Run Id")
+    running_count: int = Field(..., title="Running Count")
+    total_count: int = Field(..., title="Total Count")
+    first_created_at: str = Field(..., title="First Created At")
+    last_created_at: str = Field(..., title="Last Created At")
+    profile_id: Optional[str] = Field(None, title="Profile Id")
+
+
+class HeartbeatResponse(BaseModel):
+    """Response from bumping an instance heartbeat."""
+
+    success: bool = Field(..., description="Whether the heartbeat was successfully updated")
+    instance_id: str = Field(..., description="The instance ID")
+    last_heartbeat: Optional[str] = Field(
+        None,
+        description="ISO 8601 UTC timestamp of the heartbeat (None if not enabled)",
+    )
+    deadline_timestamp: Optional[float] = Field(
+        None,
+        description="Unix timestamp when next heartbeat is due (None if not enabled)",
+    )
+    interval_seconds: Optional[int] = Field(
+        None, description="Heartbeat interval in seconds (None if not enabled)"
+    )
 
 
 class AccountResponse(BaseModel):
@@ -372,3 +403,5 @@ class AccountResponse(BaseModel):
     team_name: str = Field(..., title="Team Name")
     instance_limit: int = Field(..., title="Instance Limit")
     instance_count: int = Field(..., title="Instance Count")
+    profile_id: Optional[str] = Field(None, title="Profile Id")
+    profile_name: Optional[str] = Field(None, title="Profile Name")
```
fleet/resources/sqlite.py
CHANGED
```diff
@@ -675,97 +675,17 @@ class SyncQueryBuilder:
 
 
 class SQLiteResource(Resource):
-    def __init__(
-        self,
-        resource: ResourceModel,
-        client: Optional["SyncWrapper"] = None,
-        db_path: Optional[str] = None,
-    ):
+    def __init__(self, resource: ResourceModel, client: "SyncWrapper"):
         super().__init__(resource)
         self.client = client
-        self.db_path = db_path
-        self._mode = "direct" if db_path else "http"
-
-    @property
-    def mode(self) -> str:
-        """Return the mode of this resource: 'direct' (local file) or 'http' (remote API)."""
-        return self._mode
 
     def describe(self) -> DescribeResponse:
         """Describe the SQLite database schema."""
-        if self._mode == "direct":
-            return self._describe_direct()
-        else:
-            return self._describe_http()
-
-    def _describe_http(self) -> DescribeResponse:
-        """Describe database schema via HTTP API."""
         response = self.client.request(
             "GET", f"/resources/sqlite/{self.resource.name}/describe"
         )
         return DescribeResponse(**response.json())
 
-    def _describe_direct(self) -> DescribeResponse:
-        """Describe database schema from local file or in-memory database."""
-        try:
-            # Check if we need URI mode (for shared memory databases)
-            use_uri = 'mode=memory' in self.db_path
-            conn = sqlite3.connect(self.db_path, uri=use_uri)
-            cursor = conn.cursor()
-
-            # Get all tables
-            cursor.execute(
-                "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
-            )
-            table_names = [row[0] for row in cursor.fetchall()]
-
-            tables = []
-            for table_name in table_names:
-                # Get table info
-                cursor.execute(f"PRAGMA table_info({table_name})")
-                columns = cursor.fetchall()
-
-                # Get CREATE TABLE SQL
-                cursor.execute(
-                    f"SELECT sql FROM sqlite_master WHERE type='table' AND name=?",
-                    (table_name,)
-                )
-                sql_row = cursor.fetchone()
-                create_sql = sql_row[0] if sql_row else ""
-
-                table_schema = {
-                    "name": table_name,
-                    "sql": create_sql,
-                    "columns": [
-                        {
-                            "name": col[1],
-                            "type": col[2],
-                            "notnull": bool(col[3]),
-                            "default_value": col[4],
-                            "primary_key": col[5] > 0,
-                        }
-                        for col in columns
-                    ],
-                }
-                tables.append(table_schema)
-
-            conn.close()
-
-            return DescribeResponse(
-                success=True,
-                resource_name=self.resource.name,
-                tables=tables,
-                message="Schema retrieved from local file",
-            )
-        except Exception as e:
-            return DescribeResponse(
-                success=False,
-                resource_name=self.resource.name,
-                tables=None,
-                error=str(e),
-                message=f"Failed to describe database: {str(e)}",
-            )
-
     def query(self, query: str, args: Optional[List[Any]] = None) -> QueryResponse:
         return self._query(query, args, read_only=True)
 
@@ -775,15 +695,6 @@ class SQLiteResource(Resource):
     def _query(
         self, query: str, args: Optional[List[Any]] = None, read_only: bool = True
    ) -> QueryResponse:
-        if self._mode == "direct":
-            return self._query_direct(query, args, read_only)
-        else:
-            return self._query_http(query, args, read_only)
-
-    def _query_http(
-        self, query: str, args: Optional[List[Any]] = None, read_only: bool = True
-    ) -> QueryResponse:
-        """Execute query via HTTP API."""
         request = QueryRequest(query=query, args=args, read_only=read_only)
         response = self.client.request(
             "POST",
@@ -792,59 +703,6 @@ class SQLiteResource(Resource):
         )
         return QueryResponse(**response.json())
 
-    def _query_direct(
-        self, query: str, args: Optional[List[Any]] = None, read_only: bool = True
-    ) -> QueryResponse:
-        """Execute query directly on local SQLite file or in-memory database."""
-        try:
-            # Check if we need URI mode (for shared memory databases)
-            use_uri = 'mode=memory' in self.db_path
-            conn = sqlite3.connect(self.db_path, uri=use_uri)
-            cursor = conn.cursor()
-
-            # Execute the query
-            if args:
-                cursor.execute(query, args)
-            else:
-                cursor.execute(query)
-
-            # For write operations, commit the transaction
-            if not read_only:
-                conn.commit()
-
-            # Get column names if available
-            columns = [desc[0] for desc in cursor.description] if cursor.description else []
-
-            # Fetch results for SELECT queries
-            rows = []
-            rows_affected = 0
-            last_insert_id = None
-
-            if cursor.description:  # SELECT query
-                rows = cursor.fetchall()
-            else:  # INSERT/UPDATE/DELETE
-                rows_affected = cursor.rowcount
-                last_insert_id = cursor.lastrowid if cursor.lastrowid else None
-
-            conn.close()
-
-            return QueryResponse(
-                success=True,
-                columns=columns if columns else None,
-                rows=rows if rows else None,
-                rows_affected=rows_affected if rows_affected > 0 else None,
-                last_insert_id=last_insert_id,
-                message="Query executed successfully",
-            )
-        except Exception as e:
-            return QueryResponse(
-                success=False,
-                columns=None,
-                rows=None,
-                error=str(e),
-                message=f"Query failed: {str(e)}",
-            )
-
     def table(self, table_name: str) -> SyncQueryBuilder:
         """Create a query builder for the specified table."""
         return SyncQueryBuilder(self, table_name)
```
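The deleted direct mode was ordinary sqlite3 usage. For anyone who relied on it, this standalone condensation of the removed _query_direct logic reproduces its behavior against a throwaway shared in-memory database; the anchor connection mirrors the _memory_anchors mechanism removed from InstanceClient above:

```python
import sqlite3

def query_direct(db_path: str, sql: str, args=None, read_only=True):
    use_uri = "mode=memory" in db_path  # shared in-memory DBs need URI mode
    conn = sqlite3.connect(db_path, uri=use_uri)
    cursor = conn.cursor()
    cursor.execute(sql, args or [])
    if not read_only:
        conn.commit()
    if cursor.description:  # SELECT: columns plus rows
        out = ([d[0] for d in cursor.description], cursor.fetchall())
    else:  # INSERT/UPDATE/DELETE: rowcount plus last insert id
        out = (cursor.rowcount, cursor.lastrowid)
    conn.close()
    return out

db = "file:demo?mode=memory&cache=shared"
anchor = sqlite3.connect(db, uri=True)  # keeps the shared in-memory DB alive between calls
query_direct(db, "CREATE TABLE t (id INTEGER PRIMARY KEY, v TEXT)", read_only=False)
query_direct(db, "INSERT INTO t (v) VALUES (?)", ["hello"], read_only=False)
print(query_direct(db, "SELECT id, v FROM t"))  # (['id', 'v'], [(1, 'hello')])
anchor.close()
```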
fleet/tasks.py
CHANGED
```diff
@@ -208,12 +208,19 @@ class Task(BaseModel):
         image_type: Optional[str] = None,
         ttl_seconds: Optional[int] = None,
         run_id: Optional[str] = None,
+        heartbeat_interval: Optional[int] = None,
     ):
         """Create an environment instance for this task's environment.
 
         Alias for make() method. Uses the task's env_id (and version if present) to create the env.
         """
-        return self.make(
+        return self.make(
+            region=region,
+            image_type=image_type,
+            ttl_seconds=ttl_seconds,
+            run_id=run_id,
+            heartbeat_interval=heartbeat_interval,
+        )
 
     def make(
         self,
@@ -221,6 +228,7 @@ class Task(BaseModel):
         image_type: Optional[str] = None,
         ttl_seconds: Optional[int] = None,
         run_id: Optional[str] = None,
+        heartbeat_interval: Optional[int] = None,
     ):
         """Create an environment instance with task's configuration.
 
@@ -229,12 +237,14 @@ class Task(BaseModel):
         - data_key (data_id + data_version, if present)
         - env_variables (if present)
         - run_id (if present)
+        - heartbeat_interval (if present)
 
         Args:
             region: Optional AWS region for the environment
             image_type: Optional image type for the environment
             ttl_seconds: Optional TTL in seconds for the instance
             run_id: Optional run ID to group instances
+            heartbeat_interval: Optional heartbeat interval in seconds (30-3600)
 
         Returns:
             Environment instance configured for this task
@@ -242,7 +252,7 @@ class Task(BaseModel):
         Example:
             task = fleet.Task(key="my-task", prompt="...", env_id="my-env",
                               data_id="my-data", data_version="v1.0")
-            env = task.make(region="us-west-2", run_id="my-batch-123")
+            env = task.make(region="us-west-2", run_id="my-batch-123", heartbeat_interval=60)
         """
         if not self.env_id:
             raise ValueError("Task has no env_id defined")
@@ -258,6 +268,7 @@ class Task(BaseModel):
             image_type=image_type,
             ttl_seconds=ttl_seconds,
             run_id=run_id,
+            heartbeat_interval=heartbeat_interval,
         )
 
 
@@ -286,11 +297,8 @@ def verifier_from_string(
     # Remove lines like: @verifier(key="...")
     cleaned_code = re.sub(r"@verifier\([^)]*\)\s*\n", "", verifier_func)
     # Also remove the verifier import if present
-
-    cleaned_code = re.sub(r"
-    cleaned_code = re.sub(r"^from fleet import verifier.*$\n?", "", cleaned_code, flags=re.MULTILINE)
-    cleaned_code = re.sub(r"^import fleet\.verifiers.*$\n?", "", cleaned_code, flags=re.MULTILINE)
-    cleaned_code = re.sub(r"^import fleet$\n?", "", cleaned_code, flags=re.MULTILINE)
+    cleaned_code = re.sub(r"from fleet import.*verifier.*\n", "", cleaned_code)
+    cleaned_code = re.sub(r"import.*verifier.*\n", "", cleaned_code)
 
     # Create a globals namespace with all required imports
     exec_globals = globals().copy()
```
fleet/verifiers/bundler.py
CHANGED
```diff
@@ -37,7 +37,7 @@ class FunctionBundler:
     ) -> bytes:
         """Create a function bundle with statically extracted code."""
 
-        logger.info(f"Creating function bundle for {func.__name__}")
+        # logger.info(f"Creating function bundle for {func.__name__}")
 
         # 1. Parse the main function and find dependencies
         mod_file = Path(func.__code__.co_filename)
@@ -115,7 +115,7 @@ class FunctionBundler:
 
         # Find function calls within the verifier function
         called_functions = self._extract_function_calls(main_func_ast)
-        logger.debug(f"Functions called in verifier: {called_functions}")
+        # logger.debug(f"Functions called in verifier: {called_functions}")
 
         # Find all functions defined in the module
         module_functions = {}
@@ -128,7 +128,7 @@ class FunctionBundler:
         for func_name in called_functions:
             if func_name in module_functions and func_name != func.__name__:
                 same_module_deps.append(func_name)
-                logger.debug(f"Found same-module dependency: {func_name}")
+                # logger.debug(f"Found same-module dependency: {func_name}")
 
         # Separate local and external imports
         local_imports = {}
@@ -292,7 +292,7 @@ class FunctionBundler:
                     code = ast.unparse(node)
                     extracted_code.append(code)
                 except Exception as e:
-                    logger.warning(f"Could not unparse AST node: {e}")
+                    # logger.warning(f"Could not unparse AST node: {e}")
                     # Fallback to original source extraction
                     lines = content.split("\n")
                     start_line = node.lineno - 1
@@ -305,11 +305,11 @@ class FunctionBundler:
                     extracted_code.append(code)
 
             result = "\n\n".join(extracted_code)
-            logger.debug(f"Extracted {len(extracted_code)} items from {file_path}")
+            # logger.debug(f"Extracted {len(extracted_code)} items from {file_path}")
             return result
 
         except Exception as e:
-            logger.warning(f"Failed to extract functions from {file_path}: {e}")
+            # logger.warning(f"Failed to extract functions from {file_path}: {e}")
             # Fallback to including the entire file
             with open(file_path, "r", encoding="utf-8") as f:
                 return f.read()
@@ -464,14 +464,14 @@ class FunctionBundler:
                 version = dist.version  # Get the installed version
                 package_with_version = f"{package_name}=={version}"
                 packages.add(package_with_version)
-                logger.debug(f"Mapped {mod} -> {package_with_version}")
+                # logger.debug(f"Mapped {mod} -> {package_with_version}")
             except imd.PackageNotFoundError:
                 # Skip stdlib or local modules
-                logger.debug(f"Skipping {mod} (stdlib or local)")
+                # logger.debug(f"Skipping {mod} (stdlib or local)")
                 continue
 
         package_list = list(packages)
-        logger.debug(f"Final package list: {package_list}")
+        # logger.debug(f"Final package list: {package_list}")
         return package_list
 
     def _merge_requirements(
@@ -511,10 +511,10 @@ class FunctionBundler:
             if pkg_name not in seen_packages:
                 final_requirements.append(req)
                 seen_packages.add(pkg_name)
-            else:
-                logger.debug(
-                    f"Skipping auto-detected {req}, using explicit version instead"
-                )
+            # else:
+            #     logger.debug(
+            #         f"Skipping auto-detected {req}, using explicit version instead"
+            #     )
 
         # Always ensure fleet-python is included
         if "fleet-python" not in seen_packages:
@@ -565,9 +565,9 @@ class FunctionBundler:
                 )
                 if dep_src:
                     same_module_code += f"\n{dep_src}\n"
-                    logger.debug(
-                        f"Extracted same-module dependency: {dep_name}"
-                    )
+                    # logger.debug(
+                    #     f"Extracted same-module dependency: {dep_name}"
+                    # )
 
             # Create verifier.py with the main function
             verifier_file = build_dir / "verifier.py"
@@ -586,7 +586,7 @@ class FunctionBundler:
 {code}
 """
             dest_path.write_text(extracted_content)
-            logger.debug(f"Created extracted file: {relative_path}")
+            # logger.debug(f"Created extracted file: {relative_path}")
 
             # Ensure __init__.py files exist
             self._ensure_init_files(Path(relative_path), build_dir)
@@ -595,7 +595,7 @@ class FunctionBundler:
             return self._create_zip_bundle(build_dir)
 
         except Exception as e:
-            logger.error(f"Failed to build function bundle: {e}")
+            # logger.error(f"Failed to build function bundle: {e}")
             raise RuntimeError(f"Function bundle creation failed: {e}")
 
     def _ensure_init_files(self, rel_path: Path, build_dir: Path):
@@ -607,7 +607,7 @@ class FunctionBundler:
             if not init_file.exists():
                 init_file.parent.mkdir(parents=True, exist_ok=True)
                 init_file.write_text("# Auto-generated __init__.py")
-                logger.debug(f"Created __init__.py: {current}")
+                # logger.debug(f"Created __init__.py: {current}")
             current = current.parent
 
     def _create_zip_bundle(self, build_dir: Path) -> bytes:
@@ -621,7 +621,7 @@ class FunctionBundler:
                 zf.write(file_path, arcname)
 
         bundle_size = len(zip_buffer.getvalue())
-        logger.debug(f"Created function bundle ({bundle_size:,} bytes)")
+        # logger.debug(f"Created function bundle ({bundle_size:,} bytes)")
         return zip_buffer.getvalue()
 
     def _extract_function_source(
@@ -662,7 +662,8 @@ class FunctionBundler:
             return "\n".join(func_lines)
 
         except Exception as e:
-            logger.warning(f"Failed to extract function {function_name}: {e}")
+            # logger.warning(f"Failed to extract function {function_name}: {e}")
+            pass
 
         return None
 
```
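Every change in this file comments out a logger call. If the goal is quieter output, a less invasive alternative is to raise the logger level at configuration time. A sketch, assuming these modules obtain their loggers via logging.getLogger(__name__) (the diff does not show the logger setup):

```python
import logging

# Silence just the bundler's debug/info/warning chatter
logging.getLogger("fleet.verifiers.bundler").setLevel(logging.ERROR)

# Or silence the whole package subtree in one line
logging.getLogger("fleet").setLevel(logging.CRITICAL)
```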
fleet/verifiers/decorator.py
CHANGED
fleet/verifiers/verifier.py
CHANGED
```diff
@@ -90,9 +90,9 @@ class SyncVerifierFunction:
 
             self._bundle_data = zip_buffer.getvalue()
             self._bundle_sha = _get_bundle_sha(self._bundle_data)
-            logger.debug(
-                f"Created bundle from raw code for {self.key} with SHA: {self._bundle_sha}"
-            )
+            # logger.debug(
+            #     f"Created bundle from raw code for {self.key} with SHA: {self._bundle_sha}"
+            # )
         else:
             # Try to create bundle from function source
             try:
@@ -100,9 +100,9 @@ class SyncVerifierFunction:
                     self.func, self.extra_requirements, self.verifier_id
                 )
                 self._bundle_sha = _get_bundle_sha(self._bundle_data)
-                logger.debug(
-                    f"Created bundle for {self.key} with SHA: {self._bundle_sha}"
-                )
+                # logger.debug(
+                #     f"Created bundle for {self.key} with SHA: {self._bundle_sha}"
+                # )
             except OSError as e:
                 # Can't create bundle - no source and no raw code
                 raise OSError(f"Cannot create bundle for {self.key}: {e}")
@@ -115,20 +115,21 @@ class SyncVerifierFunction:
 
         # If bundle_data is empty, we're using server-side bundle
         if not bundle_data:
-            logger.debug(f"Using server-side bundle {bundle_sha[:8]}...")
+            # logger.debug(f"Using server-side bundle {bundle_sha[:8]}...")
             return bundle_sha, False  # No upload needed, server has it
 
         # Always check if bundle exists on server
         try:
             exists = env.check_bundle_exists(bundle_sha)
             if exists.success:
-                logger.info(f"Bundle {bundle_sha[:8]}... found on server")
+                # logger.info(f"Bundle {bundle_sha[:8]}... found on server")
                 return bundle_sha, False  # Found on server, no upload needed
         except Exception as e:
-            logger.warning(f"Failed to check bundle existence: {e}")
+            # logger.warning(f"Failed to check bundle existence: {e}")
+            pass
 
         # Bundle not found on server - upload needed
-        logger.info(f"Bundle {bundle_sha[:8]}... needs to be uploaded")
+        # logger.info(f"Bundle {bundle_sha[:8]}... needs to be uploaded")
         return bundle_sha, True  # Upload needed
 
     def __call__(self, env: "SyncEnv", *args, **kwargs) -> float:
@@ -158,7 +159,7 @@ class SyncVerifierFunction:
             )
 
         except Exception as e:
-            logger.error(f"Error in verifier {self.key}: {e}")
+            # logger.error(f"Error in verifier {self.key}: {e}")
             # Return error score 0
             return 0.0
 
@@ -190,7 +191,7 @@ class SyncVerifierFunction:
         try:
             return float(result)
         except (ValueError, TypeError):
-            logger.warning(f"Could not convert result to float: {result}")
+            # logger.warning(f"Could not convert result to float: {result}")
             return 0.0
 
     def _raise_remote_error(self, error_info: Dict[str, Any]):
@@ -249,7 +250,7 @@ Remote traceback:
 
         if needs_upload:
             # Need to upload bundle to S3
-            logger.info(f"Uploading bundle {bundle_sha[:8]}... for {self.key}")
+            # logger.info(f"Uploading bundle {bundle_sha[:8]}... for {self.key}")
             bundle_data, _ = self._get_or_create_bundle()
 
             response = env.execute_verifier_remote(
@@ -263,12 +264,12 @@ Remote traceback:
                 needs_upload=True,
             )
 
-            logger.debug(f"Bundle {bundle_sha[:8]}... uploaded successfully")
+            # logger.debug(f"Bundle {bundle_sha[:8]}... uploaded successfully")
             return response
 
         else:
             # Bundle already available - execute without upload
-            logger.info(f"Bundle {bundle_sha[:8]}... already cached for {self.key}")
+            # logger.info(f"Bundle {bundle_sha[:8]}... already cached for {self.key}")
             response = env.execute_verifier_remote(
                 bundle_data=b"",  # Empty bundle since it's cached
                 bundle_sha=bundle_sha,
@@ -284,9 +285,9 @@ Remote traceback:
         except Exception as e:
             # Check if error indicates bundle not found and retry with upload
             if self._is_bundle_not_found_error(e) and not needs_upload:
-                logger.info(
-                    f"Bundle {bundle_sha[:8]}... not found on server, uploading..."
-                )
+                # logger.info(
+                #     f"Bundle {bundle_sha[:8]}... not found on server, uploading..."
+                # )
                 bundle_data, _ = self._get_or_create_bundle()
                 response = env.execute_verifier_remote(
                     bundle_data=bundle_data,
@@ -300,7 +301,7 @@ Remote traceback:
             )
             return response
         else:
-            logger.error(f"Error in remote execution of {self.key}: {e}")
+            # logger.error(f"Error in remote execution of {self.key}: {e}")
             raise
 
 
```
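The surviving control flow around these commented-out calls is a check-then-upload handshake: probe the server for the bundle SHA, send an empty payload when it is cached, and retry with the real bundle if the cached path fails. An offline simulation of that flow with stubbed server calls (all names here are stand-ins, not the fleet API):

```python
import hashlib

_server_bundles = set()  # stand-in for server-side bundle storage

def check_bundle_exists(sha: str) -> bool:
    return sha in _server_bundles

def execute_verifier_remote(bundle_data: bytes, bundle_sha: str) -> str:
    # Cached path: an empty payload is only valid if the server has the SHA
    if not bundle_data and bundle_sha not in _server_bundles:
        raise RuntimeError("bundle not found")
    _server_bundles.add(bundle_sha)
    return f"executed {bundle_sha[:8]}"

bundle = b"zip-bytes-of-the-verifier"
sha = hashlib.sha256(bundle).hexdigest()

needs_upload = not check_bundle_exists(sha)
try:
    result = execute_verifier_remote(bundle if needs_upload else b"", sha)
except RuntimeError:
    # Cached path failed: the server lost the bundle, so upload and retry
    result = execute_verifier_remote(bundle, sha)
print(result)
```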