fleet-python 0.2.13__py3-none-any.whl → 0.2.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of fleet-python might be problematic. Click here for more details.
- examples/diff_example.py +161 -0
- examples/dsl_example.py +50 -1
- examples/example_action_log.py +28 -0
- examples/example_mcp_anthropic.py +77 -0
- examples/example_mcp_openai.py +27 -0
- examples/example_task.py +199 -0
- examples/example_verifier.py +71 -0
- examples/query_builder_example.py +117 -0
- fleet/__init__.py +51 -40
- fleet/_async/base.py +14 -1
- fleet/_async/client.py +137 -19
- fleet/_async/env/client.py +4 -4
- fleet/_async/instance/__init__.py +1 -2
- fleet/_async/instance/client.py +3 -2
- fleet/_async/playwright.py +2 -2
- fleet/_async/resources/sqlite.py +654 -0
- fleet/_async/tasks.py +44 -0
- fleet/_async/verifiers/__init__.py +17 -0
- fleet/_async/verifiers/bundler.py +699 -0
- fleet/_async/verifiers/verifier.py +301 -0
- fleet/base.py +14 -1
- fleet/client.py +645 -12
- fleet/config.py +1 -1
- fleet/instance/__init__.py +1 -2
- fleet/instance/client.py +15 -5
- fleet/models.py +171 -4
- fleet/resources/browser.py +7 -8
- fleet/resources/mcp.py +60 -0
- fleet/resources/sqlite.py +654 -0
- fleet/tasks.py +44 -0
- fleet/types.py +18 -0
- fleet/verifiers/__init__.py +11 -5
- fleet/verifiers/bundler.py +699 -0
- fleet/verifiers/decorator.py +103 -0
- fleet/verifiers/verifier.py +301 -0
- {fleet_python-0.2.13.dist-info → fleet_python-0.2.15.dist-info}/METADATA +3 -42
- fleet_python-0.2.15.dist-info/RECORD +69 -0
- fleet_python-0.2.13.dist-info/RECORD +0 -52
- {fleet_python-0.2.13.dist-info → fleet_python-0.2.15.dist-info}/WHEEL +0 -0
- {fleet_python-0.2.13.dist-info → fleet_python-0.2.15.dist-info}/licenses/LICENSE +0 -0
- {fleet_python-0.2.13.dist-info → fleet_python-0.2.15.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,301 @@
|
|
|
1
|
+
"""Fleet SDK Verifier - Async Version.
|
|
2
|
+
|
|
3
|
+
Provides a @verifier decorator that can wrap any sync function to support
|
|
4
|
+
both local execution and remote execution via .remote() method.
|
|
5
|
+
|
|
6
|
+
The decorated function must take 'env' as its first parameter, making it explicit
|
|
7
|
+
that verifiers operate within an environment context.
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
import functools
|
|
11
|
+
import uuid
|
|
12
|
+
import logging
|
|
13
|
+
import hashlib
|
|
14
|
+
import asyncio
|
|
15
|
+
import inspect
|
|
16
|
+
from typing import Any, Callable, Dict, Optional, List, TypeVar, Set, Union
|
|
17
|
+
|
|
18
|
+
from .bundler import FunctionBundler
|
|
19
|
+
from ..client import AsyncEnv
|
|
20
|
+
|
|
21
|
+
logger = logging.getLogger(__name__)
|
|
22
|
+
|
|
23
|
+
F = TypeVar('F', bound=Callable[..., Any])
|
|
24
|
+
|
|
25
|
+
# Global cache to track which bundle SHAs have been uploaded to S3
|
|
26
|
+
_uploaded_bundle_shas: Set[str] = set()
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
@functools.lru_cache(maxsize=128)
|
|
30
|
+
def _get_bundle_sha(bundle_data: bytes) -> str:
|
|
31
|
+
"""Calculate SHA256 hash of bundle data with LRU caching."""
|
|
32
|
+
return hashlib.sha256(bundle_data).hexdigest()
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class AsyncVerifierFunction:
    """Wrapper for a verifier function with an env-first calling convention.

    Supports local execution (``await fn(env, ...)``) for both sync and async
    functions, and remote execution (``await fn.remote(env, ...)``) for sync
    functions only, with SHA-based bundle upload caching.
    """

    def __init__(
        self,
        func: "F",
        key: str,
        extra_requirements: Optional[List[str]] = None,
        verifier_id: Optional[str] = None,
    ):
        """Wrap *func* as a verifier.

        Args:
            func: The verifier callable; must take ``env`` as its first parameter.
            key: Stable key identifying the verifier.
            extra_requirements: Additional PyPI packages the bundle needs.
            verifier_id: Stable id; a random UUID4 is generated when omitted.
        """
        self.func = func
        self.key = key
        self.name = key  # Backward-compatible alias for ``key``.
        self.verifier_id = verifier_id or str(uuid.uuid4())
        self.extra_requirements = extra_requirements or []
        self._bundler = FunctionBundler()
        self._bundle_sha: Optional[str] = None  # Cached bundle SHA-256 digest
        self._bundle_data: Optional[bytes] = None  # Cached bundle payload
        self._is_async = asyncio.iscoroutinefunction(func)

        # Copy function metadata (__name__, __doc__, ...) onto the wrapper.
        functools.update_wrapper(self, func)

    def _get_or_create_bundle(self) -> tuple[bytes, str]:
        """Return ``(bundle_data, sha)``, building and caching them on first use."""
        if self._bundle_data is None or self._bundle_sha is None:
            self._bundle_data = self._bundler.create_bundle(
                self.func,
                self.extra_requirements,
                self.verifier_id,
            )
            self._bundle_sha = _get_bundle_sha(self._bundle_data)
            logger.debug(f"Created bundle for {self.name} with SHA: {self._bundle_sha}")

        return self._bundle_data, self._bundle_sha

    async def _check_bundle_status(self, env: "AsyncEnv") -> tuple[str, bool]:
        """Check whether the bundle must be uploaded; return ``(sha, needs_upload)``."""
        _, bundle_sha = self._get_or_create_bundle()

        # 1. Check the local process cache first.
        if bundle_sha in _uploaded_bundle_shas:
            logger.debug(f"Bundle {bundle_sha[:8]}... found in local cache")
            return bundle_sha, False

        # 2. Ask the server whether the bundle already exists.
        # TODO: Add endpoint to check if bundle SHA exists in S3
        try:
            exists = await env.check_bundle_exists(bundle_sha)
            if exists.success:
                logger.info(f"Bundle {bundle_sha[:8]}... found on server, updating cache")
                _uploaded_bundle_shas.add(bundle_sha)
                return bundle_sha, False
        except Exception as e:
            # Existence check is best-effort; fall through to an upload.
            logger.warning(f"Failed to check bundle existence: {e}")

        # 3. Not found locally or on the server - upload needed.
        logger.info(f"Bundle {bundle_sha[:8]}... needs to be uploaded")
        return bundle_sha, True

    async def __call__(self, env: "AsyncEnv", *args, **kwargs) -> Union[float, Dict[str, Any]]:
        """Execute the verifier locally with *env* as the first argument.

        Returns:
            The numeric score, or the full result dict when the wrapped
            function returns a dict containing a ``"score"`` key (the dict is
            passed through unchanged for local callers).

        Any exception is logged and reported as a score of 0.0 (deliberate
        best-effort semantics; errors never propagate from local execution).
        """
        try:
            if self._is_async:
                result = await self.func(env, *args, **kwargs)
            else:
                result = self.func(env, *args, **kwargs)

            if isinstance(result, (int, float)):
                # Direct score return.
                return float(result)
            if isinstance(result, dict) and "score" in result:
                # Preserve the full payload for local callers.
                return result
            if hasattr(result, 'score'):
                # Extract score from an object attribute.
                return float(result.score)
            raise ValueError(f"Verifier function must return a score (number). Got {type(result)}")

        except Exception as e:
            logger.error(f"Error in verifier {self.name}: {e}")
            # Return error score 0.
            return 0.0

    async def remote(self, env: "AsyncEnv", *args, **kwargs) -> float:
        """Execute the verifier remotely with SHA-based bundle caching.

        Raises:
            NotImplementedError: If the wrapped function is async (the remote
                execution environment only supports synchronous functions).
        """
        if self._is_async:
            raise NotImplementedError(
                f"Async verifier '{self.name}' cannot be executed remotely. "
                "The remote execution environment only supports synchronous functions. "
                "Please provide a synchronous version of your verifier."
            )

        try:
            bundle_sha, needs_upload = await self._check_bundle_status(env)
            # bundle_data is always sent (server uses it for local caching too).
            bundle_data, _ = self._get_or_create_bundle()

            if needs_upload:
                logger.info(f"Uploading bundle {bundle_sha[:8]}... for {self.key}")
            else:
                logger.info(f"Executing cached bundle {bundle_sha[:8]}... for {self.key}")

            response = await env.execute_verifier_remote(
                bundle_data=bundle_data,
                bundle_sha=bundle_sha,
                key=self.key,
                function_name=self.func.__name__,
                args=args,
                kwargs=kwargs,
                needs_upload=needs_upload,
            )

            if needs_upload:
                # Mark as uploaded only after a successful round-trip.
                _uploaded_bundle_shas.add(bundle_sha)
                logger.debug(f"Registered bundle {bundle_sha[:8]}... as uploaded")

            if response.success:
                return self._process_result(response.result)
            else:
                self._raise_remote_error(response.error)

        except Exception as e:
            logger.error(f"Remote execution failed for {self.key}: {e}")
            # If it's an HTTP error, surface the server's response body.
            if hasattr(e, 'response') and hasattr(e.response, 'text'):
                logger.error(f"Server response: {e.response.text}")
            raise

    def _process_result(self, result: Any) -> float:
        """Coerce a remote execution result into a float score."""
        if isinstance(result, (int, float)):
            return float(result)
        if isinstance(result, dict) and "score" in result:
            return float(result["score"])
        if hasattr(result, 'score'):
            return float(result.score)
        # Best-effort conversion for anything else.
        try:
            return float(result)
        except (ValueError, TypeError):
            logger.warning(f"Could not convert result to float: {result}")
            return 0.0

    def _raise_remote_error(self, error_info: Dict[str, Any]):
        """Reconstruct a remote error as a local exception.

        Looks up the original exception type by name on the ``builtins``
        module (NOT ``__builtins__``, which is a plain dict in any imported
        module and therefore never yields the class via ``getattr``), and
        falls back to RuntimeError for unknown or non-exception names.
        """
        import builtins

        error_type = error_info.get("type", "RuntimeError")
        message = error_info.get("message", "Remote execution failed")
        traceback_str = error_info.get("traceback", "")

        # Create a rich error message including the remote traceback.
        full_message = f"""
Remote verifier execution failed:
{message}

Remote traceback:
{traceback_str}
""".strip()

        exception_class = getattr(builtins, error_type, RuntimeError)
        if not (isinstance(exception_class, type) and issubclass(exception_class, BaseException)):
            exception_class = RuntimeError
        raise exception_class(full_message)

    def _get_env_id(self, env: "AsyncEnv") -> str:
        """Generate a unique identifier for the environment."""
        # Prefer the instance base URL as a stable identifier.
        if hasattr(env, 'instance') and hasattr(env.instance, 'base_url'):
            return f"{env.instance.base_url}"
        # Fallback to the object id (less ideal but works).
        return str(id(env))

    def _is_bundle_not_found_error(self, error: Exception) -> bool:
        """Heuristically detect 'bundle not found on server' errors by message."""
        error_msg = str(error).lower()
        return (
            "bundle not found" in error_msg or
            "verifier not found" in error_msg or
            "404" in error_msg or
            "not found" in error_msg
        )
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
def verifier(
    key: Optional[str] = None,
    extra_requirements: Optional[List[str]] = None
) -> "Callable[[F], AsyncVerifierFunction]":
    """
    Decorator to create a verifier function with env-first pattern.

    The decorated function must take 'env' as its first parameter, making it
    explicit that verifiers operate within an environment context. This makes
    verifiers reusable across different environments.

    Supports both parenthesized and bare usage::

        @verifier(key="check_user_count")   # explicit key
        @verifier()                          # key defaults to function name
        @verifier                            # bare form, same default

    Args:
        key: Optional key for the verifier. Defaults to the function name.
            (When used as a bare decorator, this receives the function itself.)
        extra_requirements: Additional PyPI packages needed by the verifier.

    Example:
        # Synchronous verifier (works locally and remotely)
        @verifier(key="check_user_count")
        def check_user_count(env, expected_count: int) -> float:
            db = env.db()
            result = db.query("SELECT COUNT(*) FROM users")
            return 1.0 if result.rows[0][0] >= expected_count else 0.0

        # Async verifier (only works locally)
        @verifier(key="check_user_async")
        async def check_user_async(env, expected_count: int) -> float:
            db = env.db()
            result = await db.query("SELECT COUNT(*) FROM users")
            return 1.0 if result.rows[0][0] >= expected_count else 0.0

        # Usage
        env = await flt.env.make_async("fira")
        result = await check_user_count(env, 5)          # local, sync verifier
        result = await check_user_async(env, 5)          # local, async verifier
        result = await check_user_count.remote(env, 5)   # remote, sync only
        # await check_user_async.remote(env, 5)          # raises NotImplementedError
    """
    def decorator(func: "F") -> "AsyncVerifierFunction":
        return AsyncVerifierFunction(
            func,
            key or func.__name__,
            extra_requirements,
            str(uuid.uuid4()),
        )

    if callable(key):
        # Bare usage: ``@verifier`` without parentheses - *key* is the function.
        func, key = key, None
        return decorator(func)

    return decorator
|
fleet/base.py
CHANGED
|
@@ -1,8 +1,10 @@
|
|
|
1
1
|
import httpx
|
|
2
2
|
from typing import Dict, Any, Optional
|
|
3
3
|
import json
|
|
4
|
+
import logging
|
|
4
5
|
|
|
5
6
|
from .models import InstanceResponse
|
|
7
|
+
from .config import GLOBAL_BASE_URL
|
|
6
8
|
from .exceptions import (
|
|
7
9
|
FleetAPIError,
|
|
8
10
|
FleetAuthenticationError,
|
|
@@ -18,6 +20,8 @@ from .exceptions import (
|
|
|
18
20
|
FleetPermissionError,
|
|
19
21
|
)
|
|
20
22
|
|
|
23
|
+
logger = logging.getLogger(__name__)
|
|
24
|
+
|
|
21
25
|
|
|
22
26
|
class EnvironmentBase(InstanceResponse):
|
|
23
27
|
@property
|
|
@@ -31,7 +35,7 @@ class BaseWrapper:
|
|
|
31
35
|
raise ValueError("api_key is required")
|
|
32
36
|
self.api_key = api_key
|
|
33
37
|
if base_url is None:
|
|
34
|
-
base_url =
|
|
38
|
+
base_url = GLOBAL_BASE_URL
|
|
35
39
|
self.base_url = base_url
|
|
36
40
|
|
|
37
41
|
def get_headers(self) -> Dict[str, str]:
|
|
@@ -40,6 +44,10 @@ class BaseWrapper:
|
|
|
40
44
|
"X-Fleet-SDK-Version": "1.0.0",
|
|
41
45
|
}
|
|
42
46
|
headers["Authorization"] = f"Bearer {self.api_key}"
|
|
47
|
+
# Debug log
|
|
48
|
+
import logging
|
|
49
|
+
logger = logging.getLogger(__name__)
|
|
50
|
+
logger.debug(f"Headers being sent: {headers}")
|
|
43
51
|
return headers
|
|
44
52
|
|
|
45
53
|
|
|
@@ -81,6 +89,11 @@ class SyncWrapper(BaseWrapper):
|
|
|
81
89
|
def _handle_error_response(self, response: httpx.Response) -> None:
|
|
82
90
|
"""Handle HTTP error responses and convert to appropriate Fleet exceptions."""
|
|
83
91
|
status_code = response.status_code
|
|
92
|
+
|
|
93
|
+
# Debug log 500 errors
|
|
94
|
+
if status_code == 500:
|
|
95
|
+
logger.error(f"Got 500 error from {response.url}")
|
|
96
|
+
logger.error(f"Response text: {response.text}")
|
|
84
97
|
|
|
85
98
|
# Try to parse error response as JSON
|
|
86
99
|
try:
|