fleet-python 0.2.47__tar.gz → 0.2.48__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of fleet-python might be problematic.
- {fleet_python-0.2.47 → fleet_python-0.2.48}/PKG-INFO +1 -1
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/client.py +3 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/tasks.py +2 -1
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/client.py +62 -72
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/instance/client.py +3 -1
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/models.py +1 -3
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/resources/sqlite.py +9 -3
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/tasks.py +45 -49
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/verifiers/parse.py +3 -3
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/verifiers/verifier.py +9 -20
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet_python.egg-info/PKG-INFO +1 -1
- {fleet_python-0.2.47 → fleet_python-0.2.48}/pyproject.toml +1 -1
- {fleet_python-0.2.47 → fleet_python-0.2.48}/LICENSE +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/README.md +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/diff_example.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/dsl_example.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/example.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/exampleResume.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/example_account.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/example_action_log.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/example_client.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/example_mcp_anthropic.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/example_mcp_openai.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/example_sync.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/example_task.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/example_tasks.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/example_verifier.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/gemini_example.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/json_tasks_example.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/nova_act_example.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/openai_example.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/openai_simple_example.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/query_builder_example.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/quickstart.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/examples/test_cdp_logging.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/__init__.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/__init__.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/base.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/env/__init__.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/env/client.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/exceptions.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/global_client.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/instance/__init__.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/instance/base.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/instance/client.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/models.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/resources/__init__.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/resources/base.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/resources/browser.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/resources/mcp.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/resources/sqlite.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/verifiers/__init__.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/verifiers/bundler.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/_async/verifiers/verifier.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/base.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/config.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/env/__init__.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/env/client.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/exceptions.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/global_client.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/instance/__init__.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/instance/base.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/instance/models.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/resources/__init__.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/resources/base.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/resources/browser.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/resources/mcp.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/types.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/verifiers/__init__.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/verifiers/bundler.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/verifiers/code.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/verifiers/db.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/verifiers/decorator.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet/verifiers/sql_differ.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet_python.egg-info/SOURCES.txt +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet_python.egg-info/dependency_links.txt +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet_python.egg-info/requires.txt +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/fleet_python.egg-info/top_level.txt +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/scripts/fix_sync_imports.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/scripts/unasync.py +0 -0
- {fleet_python-0.2.47 → fleet_python-0.2.48}/setup.cfg +0 -0
fleet/_async/client.py

@@ -379,6 +379,7 @@ class AsyncFleet:
         keys: Optional[List[str]] = None,
         version: Optional[str] = None,
         team_id: Optional[str] = None,
+        project_key: Optional[str] = None,
     ) -> List[Task]:
         """Load tasks for the authenticated team, with optional filtering.

@@ -398,6 +399,8 @@ class AsyncFleet:
             params["task_keys"] = keys
         if team_id is not None:
             params["team_id"] = team_id
+        if project_key is not None:
+            params["project_key"] = project_key

         response = await self.client.request("GET", "/v1/tasks", params=params)
         task_list_response = TaskListResponse(**response.json())
fleet/_async/tasks.py

@@ -276,6 +276,7 @@ async def load_tasks(
     keys: Optional[List[str]] = None,
     version: Optional[str] = None,
     team_id: Optional[str] = None,
+    project_key: Optional[str] = None,
 ) -> List[Task]:
     """Convenience function to load tasks with optional filtering.

@@ -295,7 +296,7 @@ async def load_tasks(

     client = get_client()
     return await client.load_tasks(
-        env_key=env_key, keys=keys, version=version, team_id=team_id
+        env_key=env_key, keys=keys, version=version, team_id=team_id, project_key=project_key
     )

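A minimal usage sketch of the new project_key filter. The env_key and project_key values are placeholders, and exposing load_tasks at the package top level is an assumption that mirrors the docstring examples elsewhere in this diff.

import asyncio

import fleet


async def main() -> None:
    # Only tasks belonging to the given project are returned (keys are placeholders).
    tasks = await fleet.load_tasks(
        env_key="my-env",
        project_key="my-project",
    )
    for task in tasks:
        print(task.key)


asyncio.run(main())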
fleet/client.py

@@ -16,7 +16,6 @@

 import base64
 import cloudpickle
-import concurrent.futures
 import httpx
 import json
 import logging
@@ -291,7 +290,9 @@ class Fleet:
     def execute_verifier_remote(
         self, bundle_data: bytes, args: tuple, kwargs: dict, timeout: Optional[int] = 30
     ) -> VerifiersExecuteResponse:
-        return _execute_verifier_remote(
+        return _execute_verifier_remote(
+            self.client, bundle_data, args, kwargs, timeout
+        )

     def delete(self, instance_id: str) -> InstanceResponse:
         return _delete_instance(self.client, instance_id)
@@ -377,6 +378,7 @@ class Fleet:
         keys: Optional[List[str]] = None,
         version: Optional[str] = None,
         team_id: Optional[str] = None,
+        project_key: Optional[str] = None,
     ) -> List[Task]:
         """Load tasks for the authenticated team, with optional filtering.

@@ -396,13 +398,16 @@ class Fleet:
             params["task_keys"] = keys
         if team_id is not None:
             params["team_id"] = team_id
+        if project_key is not None:
+            params["project_key"] = project_key

         response = self.client.request("GET", "/v1/tasks", params=params)
         task_list_response = TaskListResponse(**response.json())

-        # Prepare verifier loading
-
+        # Prepare verifier loading coroutines with concurrency limit
+        verifier_coroutines = []
         task_responses_with_indices = []
+        semaphore = asyncio.Semaphore(100)  # Limit to 10 concurrent operations

         for idx, task_response in enumerate(task_list_response.tasks):
             if task_response.verifier:
@@ -413,74 +418,61 @@ class Fleet:

                 def create_verifier_with_fallback(tr, emb_code, is_error):
                     """Create verifier with fallback logic."""
-                    [old lines 416-450 removed: content not captured in the source view]
-                        is_embedded_error
+                    with semaphore:  # Acquire semaphore before operation
+                        if not is_error:
+                            # Try to create from embedded data
+                            try:
+                                return self._create_verifier_from_data(
+                                    verifier_id=tr.verifier.verifier_id,
+                                    verifier_key=tr.verifier.key,
+                                    verifier_code=emb_code,
+                                    verifier_sha=tr.verifier.sha256,
+                                )
+                            except Exception as e:
+                                logger.warning(
+                                    f"Failed to create verifier {tr.verifier.key}: {e}"
+                                )
+                                return None
+                        else:
+                            # Fallback: try fetching by ID
+                            try:
+                                logger.warning(
+                                    f"Embedded verifier code missing for {tr.verifier.key} (NoSuchKey). "
+                                    f"Attempting to refetch by id {tr.verifier.verifier_id}"
+                                )
+                                return self._load_verifier(
+                                    tr.verifier.verifier_id
+                                )
+                            except Exception as e:
+                                logger.warning(
+                                    f"Refetch by verifier id failed for {tr.verifier.key}: {e}. "
+                                    "Leaving verifier unset."
+                                )
+                                return None
+
+                # Add the coroutine for parallel execution
+                verifier_coroutines.append(
+                    create_verifier_with_fallback(
+                        task_response, embedded_code, is_embedded_error
                     )
                 )
                 task_responses_with_indices.append((idx, task_response))
             else:
                 # No verifier needed
-                [old line 457 removed: content not captured in the source view]
+                verifier_coroutines.append(None)
                 task_responses_with_indices.append((idx, task_response))

-        # Execute all verifier loading in parallel
-        [old lines 461-470 removed: content not captured in the source view]
-                futures.append(None)
-        [old line 472 removed: content not captured in the source view]
-        # Collect results
-        for future in futures:
-            if future is None:
-                verifier_results.append(None)
-            else:
-                try:
-                    result = future.result()
-                    verifier_results.append(result)
-                except Exception as e:
-                    logger.warning(f"Verifier loading failed: {e}")
-                    verifier_results.append(None)
+        # Execute all verifier loading in parallel
+        if verifier_coroutines:
+            verifier_results = asyncio.gather(
+                *[
+                    coro if coro is not None else time.sleep(0)
+                    for coro in verifier_coroutines
+                ],
+                return_exceptions=True,
+            )
+        else:
+            verifier_results = []

         # Build tasks with results
         tasks = []
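For background, a generic sketch of the gather-with-semaphore pattern the new code leans on; the names here are illustrative and not part of fleet-python. With return_exceptions=True, failures come back as exception objects in the result list, which is what the isinstance(verifier_result, Exception) handling in the next hunk checks for. Note that asyncio.gather only runs coroutines inside an active event loop.

import asyncio


async def load_one(i: int, sem: asyncio.Semaphore) -> int:
    # Bound concurrency: at most the semaphore's value of these bodies run at once.
    async with sem:
        await asyncio.sleep(0.01)  # stand-in for a network call
        if i == 3:
            raise RuntimeError("simulated failure")
        return i * i


async def main() -> None:
    sem = asyncio.Semaphore(10)
    results = await asyncio.gather(
        *(load_one(i, sem) for i in range(5)),
        return_exceptions=True,  # exceptions are returned as values, not raised
    )
    for r in results:
        if isinstance(r, Exception):
            print("failed:", r)
        else:
            print("ok:", r)


asyncio.run(main())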
@@ -493,7 +485,11 @@ class Fleet:

             if task_response.verifier:
                 # Process verifier result
-                if verifier_result
+                if isinstance(verifier_result, Exception):
+                    logger.warning(
+                        f"Verifier loading failed for {task_response.key}: {verifier_result}"
+                    )
+                elif verifier_result is not None:
                     verifier = verifier_result
                     embedded_code = task_response.verifier.code or ""
                     is_embedded_error = embedded_code.strip().startswith(
@@ -579,8 +575,6 @@ class Fleet:
             task = Task(**task_data)
             tasks.append(task)

-        responses = []
-
         for task in tasks:
             payload = TaskRequest(
                 key=task.key,
@@ -594,13 +588,10 @@ class Fleet:
                 response = self.client.request(
                     "POST", "/v1/tasks", json=payload.model_dump()
                 )
-                responses.append(response)
             except Exception as e:
                 logger.error(f"Failed to import task {task.key}: {e}")
                 continue

-        return responses
-
     def account(self) -> AccountResponse:
         """Get account information including instance limits and usage.

@@ -647,7 +638,6 @@ class Fleet:
            AsyncVerifierFunction created from the verifier code
        """
        from .tasks import verifier_from_string
-       from .verifiers import SyncVerifierFunction

        # Use verifier_from_string to create the verifier
        verifier_func = verifier_from_string(
fleet/instance/client.py

@@ -63,7 +63,9 @@ class InstanceClient:
     def load(self) -> None:
         self._load_resources()

-    def reset(
+    def reset(
+        self, reset_request: Optional[ResetRequest] = None
+    ) -> ResetResponse:
         response = self.client.request(
             "POST", "/reset", json=reset_request.model_dump() if reset_request else None
         )
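A hedged sketch of calling the reset endpoint through this client; how the InstanceClient instance is obtained is not shown in this diff, so treat the helper below as illustrative only.

def reset_instance(client):
    # `client` is assumed to be an already-constructed InstanceClient.
    # With no argument, the client POSTs /reset with an empty JSON body,
    # matching the `if reset_request else None` branch in the hunk above.
    response = client.reset()
    return response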
fleet/models.py

@@ -55,9 +55,7 @@ class Instance(BaseModel):

 class InstanceRequest(BaseModel):
     env_key: str = Field(..., title="Env Key")
-
-    data_key: Optional[str] = Field(None, title="Data Key")
-    data_version: Optional[str] = Field(None, title="Data Version")
+    version: Optional[str] = Field(None, title="Version")
     region: Optional[str] = Field("us-west-1", title="Region")
     seed: Optional[int] = Field(None, title="Seed")
     timestamp: Optional[int] = Field(None, title="Timestamp")
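A small sketch of building the request model with the consolidated field; the values are placeholders and the import path is inferred from the file list above.

from fleet.models import InstanceRequest  # path assumed from the file list

req = InstanceRequest(
    env_key="my-env",   # required
    version="v1",       # replaces the old data_key / data_version pair
    region="us-west-1",
    seed=42,
)
print(req.model_dump())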
fleet/resources/sqlite.py

@@ -651,7 +651,9 @@ class SQLiteResource(Resource):
         )
         return DescribeResponse(**response.json())

-    def query(
+    def query(
+        self, query: str, args: Optional[List[Any]] = None
+    ) -> QueryResponse:
         return self._query(query, args, read_only=True)

     def exec(self, query: str, args: Optional[List[Any]] = None) -> QueryResponse:
@@ -693,8 +695,12 @@ class SQLiteResource(Resource):
             AsyncSnapshotDiff: Object containing the differences between the two databases
         """
         # Create snapshots of both databases
-        before_snapshot = self.snapshot(
-
+        before_snapshot = self.snapshot(
+            name=f"before_{datetime.utcnow().isoformat()}"
+        )
+        after_snapshot = other.snapshot(
+            name=f"after_{datetime.utcnow().isoformat()}"
+        )

         # Return the diff between the snapshots
         return before_snapshot.diff(after_snapshot, ignore_config)
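A hedged sketch of the read-only query path whose signature appears above; how the SQLiteResource is obtained and the exact shape of QueryResponse are not part of this diff, so the names below are placeholders.

def count_rows(db, table: str):
    # `db` is assumed to be a fleet SQLiteResource attached to an instance.
    # query() is the read-only path; exec() is for statements that modify data.
    response = db.query(f"SELECT COUNT(*) FROM {table}")
    return response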
fleet/tasks.py

@@ -2,7 +2,6 @@

 from __future__ import annotations

-import asyncio
 from datetime import datetime
 from typing import Any, Dict, Optional, List, TYPE_CHECKING

@@ -12,7 +11,7 @@ from pydantic import BaseModel, Field, validator
 from fleet.types import VerifierFunction

 if TYPE_CHECKING:
-    from fleet.models import VerifiersExecuteResponse
+    from fleet._async.models import VerifiersExecuteResponse


 class Task(BaseModel):
@@ -30,7 +29,7 @@ class Task(BaseModel):
     verifier: Optional[Any] = Field(
         None,
         description="Verifier function with decorator (async or sync)",
-        exclude=True,
+        exclude=True,  # Exclude from JSON serialization
     )
     verifier_id: Optional[str] = Field(None, description="Verifier identifier")
     verifier_sha: Optional[str] = Field(None, description="Verifier SHA256 hash")
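A generic pydantic illustration of what Field(exclude=True) does here: the field stays usable on the model instance but is dropped from serialized output. The model below is illustrative, not fleet-python's.

from typing import Any, Optional

from pydantic import BaseModel, Field


class Demo(BaseModel):
    key: str
    verifier: Optional[Any] = Field(None, exclude=True)  # kept in memory, not serialized


d = Demo(key="t1", verifier=lambda env: 1.0)
print(d.model_dump())  # {'key': 't1'} -- the verifier callable is excluded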
@@ -76,7 +75,6 @@ class Task(BaseModel):
         if self.verifier:
             import inspect

-            # Check if verifier has remote method (for decorated verifiers)
             result = self.verifier.remote(env, *args, **kwargs)

             # If the result is a coroutine, we need to run it
@@ -119,9 +117,33 @@ class Task(BaseModel):
         else:
             raise ValueError("No verifier function found for this task")

-    def
+    def verify_detailed_async(
+        self, *args, **kwargs
+    ) -> "VerifiersExecuteResponse":
         """Verify the task and return the full execute response model.

+        For async environments, awaits the async verifier.
+        Works with both sync and async verifiers in async contexts.
+        """
+        # If verifier doesn't exist but verifier_func does, rebuild it
+        if not self.verifier and self.verifier_func:
+            self._rebuild_verifier()
+
+        if self.verifier:
+            result = self.verifier.remote_with_response(*args, **kwargs)
+            # If it's a coroutine, await it
+            import inspect
+
+            if inspect.iscoroutine(result):
+                return result
+            else:
+                return result
+        else:
+            raise ValueError("No verifier function found for this task")
+
+    def verify_detailed(self, env, *args, **kwargs) -> "VerifiersExecuteResponse":
+        """Verify the task and return the full execute response model (sync version).
+
         For sync environments, calls the sync verifier directly.
         For async verifiers, automatically runs them with asyncio.run().
         """
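A hedged sketch of driving both entry points; the task and env objects and the final-answer string are placeholders. verify_detailed_async may hand back a coroutine from the underlying verifier, so the async caller awaits it when needed.

import asyncio


def score_sync(task, env):
    # Sync path: returns the full VerifiersExecuteResponse.
    return task.verify_detailed(env, "final answer")


async def score_async(task):
    result = task.verify_detailed_async("final answer")
    if asyncio.iscoroutine(result):  # the method may return a coroutine
        result = await result
    return result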
@@ -153,23 +175,6 @@ class Task(BaseModel):
         else:
             raise ValueError("No verifier function found for this task")

-    def verify_detailed_async(self, *args, **kwargs) -> "VerifiersExecuteResponse":
-        """Verify the task and return the full execute response model (async version).
-
-        For async environments, returns a coroutine that when awaited returns the response.
-        Works with both sync and async verifiers in async contexts.
-        """
-        # If verifier doesn't exist but verifier_func does, rebuild it
-        if not self.verifier and self.verifier_func:
-            self._rebuild_verifier()
-
-        if self.verifier:
-            result = self.verifier.remote_with_response(*args, **kwargs)
-            # Return the result (could be a coroutine or a value)
-            return result
-        else:
-            raise ValueError("No verifier function found for this task")
-
     def _rebuild_verifier(self):
         """Rebuild the verifier from verifier_func string if it exists."""
         if self.verifier_func:
@@ -212,26 +217,20 @@ def verifier_from_string(
     """
     try:
         import inspect
-        from .verifiers import SyncVerifierFunction
-        from .verifiers.code import TASK_SUCCESSFUL_SCORE, TASK_FAILED_SCORE
-        from .verifiers.db import IgnoreConfig
-
-        # Create a globals namespace with all required imports
-        exec_globals = globals().copy()
-        exec_globals.update(
-            {
-                "TASK_SUCCESSFUL_SCORE": TASK_SUCCESSFUL_SCORE,
-                "TASK_FAILED_SCORE": TASK_FAILED_SCORE,
-                "IgnoreConfig": IgnoreConfig,
-                "Environment": object,  # Add Environment type if needed
-            }
-        )
+        from .verifiers.verifier import SyncVerifierFunction
+        from fleet.verifiers.code import TASK_SUCCESSFUL_SCORE, TASK_FAILED_SCORE
+        from fleet.verifiers.db import IgnoreConfig

         # Create a local namespace for executing the code
-        local_namespace = {
+        local_namespace = {
+            "TASK_SUCCESSFUL_SCORE": TASK_SUCCESSFUL_SCORE,
+            "TASK_FAILED_SCORE": TASK_FAILED_SCORE,
+            "IgnoreConfig": IgnoreConfig,
+            "Environment": object,  # Add Environment type if needed
+        }

         # Execute the verifier code in the namespace
-        exec(verifier_func,
+        exec(verifier_func, globals(), local_namespace)

         # Find the function that was defined
         func_obj = None
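For context, a generic illustration of executing verifier source into a controlled namespace, as the loader above does; the names and snippet are illustrative, not fleet-python's. One detail worth noting: names a compiled function looks up at call time resolve through the globals mapping passed to exec, so this sketch puts the injected constants there.

import inspect

source = (
    "def verify(answer):\n"
    "    return GOOD if answer else BAD\n"
)

exec_globals = {"GOOD": 1.0, "BAD": 0.0}  # constants the snippet is allowed to use
local_namespace = {}
exec(source, exec_globals, local_namespace)  # the def statement lands in local_namespace

# Find the function that was defined, mirroring the loader's search step.
func_obj = next(v for v in local_namespace.values() if inspect.isfunction(v))
print(func_obj("yes"))  # 1.0 -- GOOD resolves through the function's globals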
@@ -243,19 +242,15 @@ def verifier_from_string(
         if func_obj is None:
             raise ValueError("No function found in verifier code")

-        # Create an
+        # Create an AsyncVerifierFunction instance with raw code
         verifier_instance = SyncVerifierFunction(
-
-
+            func_obj,
+            verifier_key,
             verifier_id=verifier_id,
             sha256=sha256,
             raw_code=verifier_func,
         )

-        # Store additional metadata
-        verifier_instance._verifier_code = verifier_func
-        verifier_instance._sha256 = sha256
-
         return verifier_instance

     except Exception as e:
@@ -266,7 +261,7 @@ def load_tasks_from_file(filename: str) -> List[Task]:
     """Load tasks from a JSON file.

     Example:
-        tasks = fleet.load_tasks_from_file("my_tasks.json")
+        tasks = await fleet.load_tasks_from_file("my_tasks.json")
     """
     from .global_client import get_client

@@ -279,6 +274,7 @@ def load_tasks(
     keys: Optional[List[str]] = None,
     version: Optional[str] = None,
     team_id: Optional[str] = None,
+    project_key: Optional[str] = None,
 ) -> List[Task]:
     """Convenience function to load tasks with optional filtering.

@@ -298,7 +294,7 @@ def load_tasks(

     client = get_client()
     return client.load_tasks(
-        env_key=env_key, keys=keys, version=version, team_id=team_id
+        env_key=env_key, keys=keys, version=version, team_id=team_id, project_key=project_key
     )

@@ -316,8 +312,8 @@ def update_task(
        TaskResponse containing the updated task details

    Examples:
-        response = fleet.update_task("my-task", prompt="New prompt text")
-        response = fleet.update_task("my-task", verifier_code="def verify(env): return True")
+        response = await fleet.update_task("my-task", prompt="New prompt text")
+        response = await fleet.update_task("my-task", verifier_code="def verify(env): return True")
    """
    from .global_client import get_client

fleet/verifiers/parse.py

@@ -67,7 +67,7 @@ def convert_verifier_string(verifier_str: str) -> str:

    if not match:
        raise ValueError(
-            "Could not parse verifier function. Expected format: def function_name(env: Environment, final_answer: str
+            "Could not parse verifier function. Expected format: def function_name(env: Environment, final_answer: Optional[str] = None) -> float/int:"
        )

    func_name = match.group(1)
@@ -82,7 +82,7 @@ def convert_verifier_string(verifier_str: str) -> str:

    # Build the new function
    new_func = f"""def {func_name}(
-    before: DatabaseSnapshot, after: DatabaseSnapshot, transcript: str
+    before: DatabaseSnapshot, after: DatabaseSnapshot, transcript: Optional[str] = None
 ) -> int:
     class Environment:
         def db(self, name: str) -> DatabaseSnapshot:"""
@@ -128,7 +128,7 @@ def convert_verifier_string(verifier_str: str) -> str:
     def load(self):
         pass

-    def verifier(env: Environment, final_answer: str
+    def verifier(env: Environment, final_answer: Optional[str] = None) -> float:"""

    if docstring:
        new_func += f"\n    {docstring}"
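A skeleton of the verifier signature this parser expects; Environment, the database name, and the score values below are placeholders standing in for fleet-python's own types and constants.

from typing import Optional


def my_verifier(env: "Environment", final_answer: Optional[str] = None) -> float:
    # Inspect the environment (for example via env.db(...)) and/or the agent's
    # final answer, then return a numeric score.
    return 1.0 if final_answer else 0.0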
fleet/verifiers/verifier.py

@@ -12,22 +12,11 @@ import uuid
 import logging
 import hashlib
 import inspect
-from typing import
-    Any,
-    Callable,
-    Dict,
-    Optional,
-    List,
-    TypeVar,
-    TYPE_CHECKING,
-    Tuple,
-)
+from typing import Any, Callable, Dict, Optional, List, TypeVar, Tuple

 from .bundler import FunctionBundler
-
-
-from ..client import SyncEnv
-from ..models import VerifiersExecuteResponse
+from ..client import SyncEnv
+from ...models import VerifiersExecuteResponse

 logger = logging.getLogger(__name__)

@@ -109,7 +98,7 @@ class SyncVerifierFunction:

         return self._bundle_data, self._bundle_sha

-    def _check_bundle_status(self, env:
+    def _check_bundle_status(self, env: SyncEnv) -> Tuple[str, bool]:
         """Check if bundle needs to be uploaded and return (sha, needs_upload)."""
         bundle_data, bundle_sha = self._get_or_create_bundle()

@@ -131,7 +120,7 @@ class SyncVerifierFunction:
             logger.info(f"Bundle {bundle_sha[:8]}... needs to be uploaded")
             return bundle_sha, True  # Upload needed

-    def __call__(self, env:
+    def __call__(self, env: SyncEnv, *args, **kwargs) -> float:
         """Local execution of the verifier function with env as first parameter."""
         try:
             if self._is_async:
@@ -162,7 +151,7 @@ class SyncVerifierFunction:
             # Return error score 0
             return 0.0

-    def remote(self, env:
+    def remote(self, env: SyncEnv, *args, **kwargs) -> float:
         """Remote execution of the verifier function with SHA-based bundle caching."""
         response = self.remote_with_response(env, *args, **kwargs)

@@ -215,7 +204,7 @@ Remote traceback:
         except Exception:
             raise RuntimeError(full_message)

-    def _get_env_id(self, env:
+    def _get_env_id(self, env: SyncEnv) -> str:
         """Generate a unique identifier for the environment."""
         # Use instance base URL or similar unique identifier
         if hasattr(env, "instance") and hasattr(env.instance, "base_url"):
@@ -264,7 +253,6 @@ Remote traceback:
                 )

                 logger.debug(f"Bundle {bundle_sha[:8]}... uploaded successfully")
-                return response

             else:
                 # Bundle already available - execute without upload
@@ -279,7 +267,8 @@ Remote traceback:
                     kwargs=kwargs,
                     needs_upload=False,
                 )
-
+
+            return response

         except Exception as e:
             # Check if error indicates bundle not found and retry with upload
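A hedged sketch of the two remote entry points whose signatures appear above; verifier and env are placeholders for a SyncVerifierFunction and a SyncEnv obtained elsewhere.

def run_verifier_remotely(verifier, env):
    score = verifier.remote(env)                   # numeric score only
    full = verifier.remote_with_response(env)      # full VerifiersExecuteResponse
    return score, full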
All remaining files listed above are unchanged between 0.2.47 and 0.2.48.