fleet-python 0.2.45__py3-none-any.whl → 0.2.47__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of fleet-python might be problematic.

fleet/_async/models.py CHANGED
@@ -273,6 +273,12 @@ class VerifiersExecuteResponse(BaseModel):
     result: Optional[Any] = Field(
         None, description="The return value of the function", title="Result"
     )
+    verifier_id: Optional[str] = Field(
+        None, description="ID of the verifier", title="Verifier Id"
+    )
+    execution_id: Optional[str] = Field(
+        None, description="ID of the execution record", title="Execution Id"
+    )
     error: Optional[Dict[str, Any]] = Field(
         None, description="Error details if verification failed", title="Error"
     )
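
Both `fleet/models.py` and `fleet/_async/models.py` add the same two optional fields to `VerifiersExecuteResponse`, so a verification result can be tied back to server-side records. A minimal sketch of reading them, assuming `resp` is a response obtained from one of the new `*_with_response` code paths shown later in this diff:

# Sketch only: `resp` is a VerifiersExecuteResponse; both new fields are
# Optional[str] and may be None.
def describe(resp) -> str:
    verifier = resp.verifier_id or "<no verifier id>"
    execution = resp.execution_id or "<no execution record>"
    return f"verifier={verifier} execution={execution} result={resp.result!r}"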
fleet/_async/tasks.py CHANGED
@@ -3,13 +3,16 @@
 from __future__ import annotations
 
 from datetime import datetime
-from typing import Any, Dict, Optional, List
+from typing import Any, Dict, Optional, List, TYPE_CHECKING
 
 from pydantic import BaseModel, Field, validator
 
 # Import the shared VerifierFunction type that works for both async and sync
 from fleet.types import VerifierFunction
 
+if TYPE_CHECKING:
+    from fleet._async.models import VerifiersExecuteResponse
+
 
 class Task(BaseModel):
     """A task model representing a single task in the Fleet system."""
@@ -24,9 +27,9 @@ class Task(BaseModel):
     version: Optional[str] = Field(None, description="Task version")
     verifier_func: Optional[str] = Field(None, description="Verifier function code")
     verifier: Optional[Any] = Field(
-        None,
+        None,
         description="Verifier function with decorator (async or sync)",
-        exclude=True  # Exclude from JSON serialization
+        exclude=True,  # Exclude from JSON serialization
     )
     verifier_id: Optional[str] = Field(None, description="Verifier identifier")
     verifier_sha: Optional[str] = Field(None, description="Verifier SHA256 hash")
@@ -68,7 +71,7 @@ class Task(BaseModel):
         # If verifier doesn't exist but verifier_func does, rebuild it
         if not self.verifier and self.verifier_func:
             self._rebuild_verifier()
-
+
         if self.verifier:
             import asyncio
             import inspect
@@ -102,7 +105,7 @@ class Task(BaseModel):
         # If verifier doesn't exist but verifier_func does, rebuild it
        if not self.verifier and self.verifier_func:
             self._rebuild_verifier()
-
+
         if self.verifier:
             result = self.verifier.remote(*args, **kwargs)
             # If it's a coroutine, await it
@@ -115,6 +118,65 @@ class Task(BaseModel):
         else:
             raise ValueError("No verifier function found for this task")
 
+    async def verify_detailed_async(
+        self, *args, **kwargs
+    ) -> "VerifiersExecuteResponse":
+        """Verify the task and return the full execute response model.
+
+        For async environments, awaits the async verifier.
+        Works with both sync and async verifiers in async contexts.
+        """
+        # If verifier doesn't exist but verifier_func does, rebuild it
+        if not self.verifier and self.verifier_func:
+            self._rebuild_verifier()
+
+        if self.verifier:
+            result = self.verifier.remote_with_response(*args, **kwargs)
+            # If it's a coroutine, await it
+            import inspect
+
+            if inspect.iscoroutine(result):
+                return await result
+            else:
+                return result
+        else:
+            raise ValueError("No verifier function found for this task")
+
+    def verify_detailed(self, env, *args, **kwargs) -> "VerifiersExecuteResponse":
+        """Verify the task and return the full execute response model (sync version).
+
+        For sync environments, calls the sync verifier directly.
+        For async verifiers, automatically runs them with asyncio.run().
+        """
+        # If verifier doesn't exist but verifier_func does, rebuild it
+        if not self.verifier and self.verifier_func:
+            self._rebuild_verifier()
+
+        if self.verifier:
+            import asyncio
+            import inspect
+
+            # Check if verifier has remote_with_response method (for decorated verifiers)
+            result = self.verifier.remote_with_response(env, *args, **kwargs)
+
+            # If the result is a coroutine, we need to run it
+            if inspect.iscoroutine(result):
+                # Check if we're already in an event loop
+                try:
+                    asyncio.get_running_loop()
+                    # We're in an async context, can't use asyncio.run()
+                    raise RuntimeError(
+                        "Cannot run async verifier in sync mode while event loop is running. "
+                        "Use await task.verify_detailed_async() instead."
+                    )
+                except RuntimeError:
+                    # No event loop running, safe to use asyncio.run()
+                    return asyncio.run(result)
+            else:
+                return result
+        else:
+            raise ValueError("No verifier function found for this task")
+
     def _rebuild_verifier(self):
         """Rebuild the verifier from verifier_func string if it exists."""
         if self.verifier_func:
@@ -127,7 +189,7 @@ class Task(BaseModel):
                 sha256=self.verifier_sha or "",
             )
             self.verifier = verifier
-
+
     async def make_env(self, region: Optional[str] = None):
         """Create an environment instance for this task's environment.
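
The new `verify_detailed_async` / `verify_detailed` methods return the full response model instead of just a score. A hedged sketch of the async variant, where `task` is a `Task` and `env` an async environment instance obtained elsewhere from the SDK (both names are illustrative):

# Illustrative usage; `task` and `env` come from the SDK (e.g. env via
# task.make_env()), they are not defined in this diff.
async def check(task, env):
    resp = await task.verify_detailed_async(env)  # full VerifiersExecuteResponse
    if not resp.success:
        print("verification failed:", resp.error)
    return resp.result, resp.verifier_id, resp.execution_id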
 
fleet/_async/verifiers/verifier.py CHANGED
@@ -12,17 +12,17 @@ import uuid
 import logging
 import hashlib
 import asyncio
-from typing import Any, Callable, Dict, Optional, List, TypeVar, Set, Tuple
+from typing import Any, Callable, Dict, Optional, List, TypeVar, Tuple
 
 from .bundler import FunctionBundler
 from ..client import AsyncEnv
+from ...models import VerifiersExecuteResponse
 
 logger = logging.getLogger(__name__)
 
 F = TypeVar("F", bound=Callable[..., Any])
 
-# Global cache to track which bundle SHAs have been uploaded to S3
-_uploaded_bundle_shas: Set[str] = set()
+# Removed global cache - always check server for bundle status
 
 
 @functools.lru_cache(maxsize=128)
@@ -107,25 +107,16 @@ class AsyncVerifierFunction:
             logger.debug(f"Using server-side bundle {bundle_sha[:8]}...")
             return bundle_sha, False  # No upload needed, server has it
 
-        # 1. Check local process cache first
-        if bundle_sha in _uploaded_bundle_shas:
-            logger.debug(f"Bundle {bundle_sha[:8]}... found in local cache")
-            return bundle_sha, False  # Already uploaded, no upload needed
-
-        # 2. Check if bundle exists on server (pseudocode)
-        # TODO: Add endpoint to check if bundle SHA exists in S3
+        # Always check if bundle exists on server
         try:
             exists = await env.check_bundle_exists(bundle_sha)
             if exists.success:
-                logger.info(
-                    f"Bundle {bundle_sha[:8]}... found on server, updating cache"
-                )
-                _uploaded_bundle_shas.add(bundle_sha)
+                logger.info(f"Bundle {bundle_sha[:8]}... found on server")
                 return bundle_sha, False  # Found on server, no upload needed
         except Exception as e:
             logger.warning(f"Failed to check bundle existence: {e}")
 
-        # 3. Bundle not found locally or on server - upload needed
+        # Bundle not found on server - upload needed
         logger.info(f"Bundle {bundle_sha[:8]}... needs to be uploaded")
         return bundle_sha, True  # Upload needed
 
@@ -162,74 +153,15 @@
 
     async def remote(self, env: AsyncEnv, *args, **kwargs) -> float:
         """Remote execution of the verifier function with SHA-based bundle caching."""
-        # Async verifiers are now supported by the backend
-        # if self._is_async:
-        #     raise NotImplementedError(
-        #         f"Async verifier '{self.key}' cannot be executed remotely. "
-        #         "The remote execution environment only supports synchronous functions. "
-        #         "Please provide a synchronous version of your verifier."
-        #     )
-
-        args_array = list(args)
-        args_array.append({"env": env.instance_id})
-        args = tuple(args_array)
-
-        try:
-            # Check if bundle needs to be uploaded
-            bundle_sha, needs_upload = await self._check_bundle_status(env)
-
-            if needs_upload:
-                # Need to upload bundle to S3
-                logger.info(f"Uploading bundle {bundle_sha[:8]}... for {self.key}")
-                bundle_data, _ = self._get_or_create_bundle()
-
-                response = await env.execute_verifier_remote(
-                    bundle_data=bundle_data,
-                    bundle_sha=bundle_sha,
-                    key=self.key,
-                    function_name=self.func.__name__,
-                    args=args,
-                    args_array=args_array,
-                    kwargs=kwargs,
-                    needs_upload=True,
-                )
+        response = await self.remote_with_response(env, *args, **kwargs)
 
-                # Mark as uploaded after successful execution
-                _uploaded_bundle_shas.add(bundle_sha)
-                logger.debug(f"Registered bundle {bundle_sha[:8]}... as uploaded")
-
-            else:
-                # Bundle already available - execute without upload
-                logger.info(
-                    f"Executing cached bundle {bundle_sha[:8]}... for {self.key}"
-                )
-                bundle_data, _ = self._get_or_create_bundle()
-
-                response = await env.execute_verifier_remote(
-                    bundle_data=bundle_data or b"",  # Empty if using server-side bundle
-                    bundle_sha=bundle_sha,
-                    key=self.key,
-                    function_name=self.func.__name__,
-                    args=args,
-                    args_array=args_array,
-                    kwargs=kwargs,
-                    needs_upload=False,  # Don't upload, just execute
-                )
-
-            # Handle response
-            if response.stdout:
-                print(response.stdout)
-            if response.success:
-                return self._process_result(response.result)
-            else:
-                self._raise_remote_error(response.error)
-
-        except Exception as e:
-            logger.error(f"Remote execution failed for {self.key}: {e}")
-            # If it's an HTTP error, try to get more details
-            if hasattr(e, "response") and hasattr(e.response, "text"):
-                logger.error(f"Server response: {e.response.text}")
-            raise
+        # Handle response
+        if response.stdout:
+            print(response.stdout)
+        if response.success:
+            return self._process_result(response.result)
+        else:
+            self._raise_remote_error(response.error)
 
     def _process_result(self, result: Any) -> float:
         """Process remote execution result, handling different return types."""
@@ -269,7 +201,7 @@ Remote traceback:
             try:
                 exception_class = getattr(__builtins__, error_type, RuntimeError)
                 raise exception_class(full_message)
-            except:
+            except Exception:
                 raise RuntimeError(full_message)
 
     def _get_env_id(self, env: AsyncEnv) -> str:
@@ -292,6 +224,74 @@ Remote traceback:
             or "not found" in error_msg
         )
 
+    async def remote_with_response(
+        self, env: "AsyncEnv", *args, **kwargs
+    ) -> "VerifiersExecuteResponse":
+        """Remote execution of the verifier function that returns the full response model."""
+        args_array = list(args)
+        args_array.append({"env": env.instance_id})
+        args = tuple(args_array)
+
+        try:
+            # Check if bundle needs to be uploaded
+            bundle_sha, needs_upload = await self._check_bundle_status(env)
+
+            if needs_upload:
+                # Need to upload bundle to S3
+                logger.info(f"Uploading bundle {bundle_sha[:8]}... for {self.key}")
+                bundle_data, _ = self._get_or_create_bundle()
+
+                response = await env.execute_verifier_remote(
+                    bundle_data=bundle_data,
+                    bundle_sha=bundle_sha,
+                    key=self.key,
+                    function_name=self.func.__name__,
+                    args=args,
+                    args_array=args_array,
+                    kwargs=kwargs,
+                    needs_upload=True,
+                )
+
+                logger.debug(f"Bundle {bundle_sha[:8]}... uploaded successfully")
+
+            else:
+                # Bundle already available - execute without upload
+                logger.info(f"Bundle {bundle_sha[:8]}... already cached for {self.key}")
+                response = await env.execute_verifier_remote(
+                    bundle_data=b"",  # Empty bundle since it's cached
+                    bundle_sha=bundle_sha,
+                    key=self.key,
+                    function_name=self.func.__name__,
+                    args=args,
+                    args_array=args_array,
+                    kwargs=kwargs,
+                    needs_upload=False,
+                )
+
+            return response
+
+        except Exception as e:
+            # Check if error indicates bundle not found and retry with upload
+            if self._is_bundle_not_found_error(e) and not needs_upload:
+                logger.info(
+                    f"Bundle {bundle_sha[:8]}... not found on server, uploading..."
+                )
+                bundle_data, _ = self._get_or_create_bundle()
+                response = await env.execute_verifier_remote(
+                    bundle_data=bundle_data,
+                    bundle_sha=bundle_sha,
+                    key=self.key,
+                    function_name=self.func.__name__,
+                    args=args,
+                    args_array=args_array,
+                    kwargs=kwargs,
+                    needs_upload=True,
+                )
+                return response
+            else:
+                logger.error(f"Error in remote execution of {self.key}: {e}")
+                raise
+
 
 def verifier(
     key: Optional[str] = None,
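
`remote()` is now a thin wrapper around the new `remote_with_response()`: the bundle check, upload, and bundle-not-found retry live in the new method, while `remote()` keeps only the stdout/score/error handling. A hedged sketch of using both entry points, assuming `my_check` is a `@verifier`-decorated async function and `env` an `AsyncEnv` (both illustrative, not defined in this diff):

# Illustrative only; `my_check` and `env` are assumed to exist elsewhere.
async def run(my_check, env):
    score = await my_check.remote(env)               # float; raises on failure
    resp = await my_check.remote_with_response(env)  # full VerifiersExecuteResponse
    return score, resp.execution_id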
fleet/client.py CHANGED
@@ -579,6 +579,8 @@ class Fleet:
             task = Task(**task_data)
             tasks.append(task)
 
+        responses = []
+
         for task in tasks:
             payload = TaskRequest(
                 key=task.key,
@@ -592,10 +594,13 @@
                 response = self.client.request(
                     "POST", "/v1/tasks", json=payload.model_dump()
                 )
+                responses.append(response)
             except Exception as e:
                 logger.error(f"Failed to import task {task.key}: {e}")
                 continue
 
+        return responses
+
     def account(self) -> AccountResponse:
         """Get account information including instance limits and usage.
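
The enclosing `Fleet` method (its signature is outside these hunks, but the log message suggests a task-import helper) now collects each successful POST response and returns the list instead of returning nothing. A hedged sketch of what a caller might do with that; the method name `import_tasks` and its argument are assumptions, not shown in the diff:

# Hypothetical call shape; only the return-value change is taken from the diff.
# `fleet` stands for an already-configured Fleet client.
responses = fleet.import_tasks(tasks_data)
# Failed imports are logged and skipped (`continue`), so the returned list can
# be shorter than the number of tasks submitted.
print(f"imported {len(responses)} task(s)")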
 
fleet/models.py CHANGED
@@ -275,6 +275,12 @@ class VerifiersExecuteResponse(BaseModel):
     result: Optional[Any] = Field(
         None, description="The return value of the function", title="Result"
     )
+    verifier_id: Optional[str] = Field(
+        None, description="ID of the verifier", title="Verifier Id"
+    )
+    execution_id: Optional[str] = Field(
+        None, description="ID of the execution record", title="Execution Id"
+    )
     error: Optional[Dict[str, Any]] = Field(
         None, description="Error details if verification failed", title="Error"
     )
fleet/tasks.py CHANGED
@@ -4,13 +4,16 @@ from __future__ import annotations
 
 import asyncio
 from datetime import datetime
-from typing import Any, Dict, Optional, List
+from typing import Any, Dict, Optional, List, TYPE_CHECKING
 
 from pydantic import BaseModel, Field, validator
 
 # Import the shared VerifierFunction type that works for both async and sync
 from fleet.types import VerifierFunction
 
+if TYPE_CHECKING:
+    from fleet.models import VerifiersExecuteResponse
+
 
 class Task(BaseModel):
     """A task model representing a single task in the Fleet system."""
@@ -116,6 +119,57 @@ class Task(BaseModel):
         else:
             raise ValueError("No verifier function found for this task")
 
+    def verify_detailed(self, env, *args, **kwargs) -> "VerifiersExecuteResponse":
+        """Verify the task and return the full execute response model.
+
+        For sync environments, calls the sync verifier directly.
+        For async verifiers, automatically runs them with asyncio.run().
+        """
+        # If verifier doesn't exist but verifier_func does, rebuild it
+        if not self.verifier and self.verifier_func:
+            self._rebuild_verifier()
+
+        if self.verifier:
+            import inspect
+
+            # Check if verifier has remote_with_response method (for decorated verifiers)
+            result = self.verifier.remote_with_response(env, *args, **kwargs)
+
+            # If the result is a coroutine, we need to run it
+            if inspect.iscoroutine(result):
+                # Check if we're already in an event loop
+                try:
+                    asyncio.get_running_loop()
+                    # We're in an async context, can't use asyncio.run()
+                    raise RuntimeError(
+                        "Cannot run async verifier in sync mode while event loop is running. "
+                        "Use await task.verify_detailed_async() instead."
+                    )
+                except RuntimeError:
+                    # No event loop running, safe to use asyncio.run()
+                    return asyncio.run(result)
+            else:
+                return result
+        else:
+            raise ValueError("No verifier function found for this task")
+
+    def verify_detailed_async(self, *args, **kwargs) -> "VerifiersExecuteResponse":
+        """Verify the task and return the full execute response model (async version).
+
+        For async environments, returns a coroutine that when awaited returns the response.
+        Works with both sync and async verifiers in async contexts.
+        """
+        # If verifier doesn't exist but verifier_func does, rebuild it
+        if not self.verifier and self.verifier_func:
+            self._rebuild_verifier()
+
+        if self.verifier:
+            result = self.verifier.remote_with_response(*args, **kwargs)
+            # Return the result (could be a coroutine or a value)
+            return result
+        else:
+            raise ValueError("No verifier function found for this task")
+
     def _rebuild_verifier(self):
         """Rebuild the verifier from verifier_func string if it exists."""
         if self.verifier_func:
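
The sync `Task` gains the same pair of methods. A sketch assuming `task` and a sync `env` obtained elsewhere from the SDK; note that `verify_detailed()` refuses to run an async verifier while an event loop is already running and points callers at `verify_detailed_async()` instead:

# Illustrative only; `task` and `env` come from the SDK, not this diff.
resp = task.verify_detailed(env)  # full VerifiersExecuteResponse
if resp.success:
    print(resp.result, resp.execution_id)
else:
    print("verification failed:", resp.error)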
fleet/verifiers/verifier.py CHANGED
@@ -19,7 +19,6 @@ from typing import (
     Optional,
     List,
     TypeVar,
-    Set,
     TYPE_CHECKING,
     Tuple,
 )
@@ -28,13 +27,13 @@ from .bundler import FunctionBundler
 
 if TYPE_CHECKING:
     from ..client import SyncEnv
+    from ..models import VerifiersExecuteResponse
 
 logger = logging.getLogger(__name__)
 
 F = TypeVar("F", bound=Callable[..., Any])
 
-# Global cache to track which bundle SHAs have been uploaded to S3
-_uploaded_bundle_shas: Set[str] = set()
+# Removed global cache - always check server for bundle status
 
 
 @functools.lru_cache(maxsize=128)
@@ -119,25 +118,16 @@ class SyncVerifierFunction:
             logger.debug(f"Using server-side bundle {bundle_sha[:8]}...")
             return bundle_sha, False  # No upload needed, server has it
 
-        # 1. Check local process cache first
-        if bundle_sha in _uploaded_bundle_shas:
-            logger.debug(f"Bundle {bundle_sha[:8]}... found in local cache")
-            return bundle_sha, False  # Already uploaded, no upload needed
-
-        # 2. Check if bundle exists on server (pseudocode)
-        # TODO: Add endpoint to check if bundle SHA exists in S3
+        # Always check if bundle exists on server
        try:
             exists = env.check_bundle_exists(bundle_sha)
             if exists.success:
-                logger.info(
-                    f"Bundle {bundle_sha[:8]}... found on server, updating cache"
-                )
-                _uploaded_bundle_shas.add(bundle_sha)
+                logger.info(f"Bundle {bundle_sha[:8]}... found on server")
                 return bundle_sha, False  # Found on server, no upload needed
         except Exception as e:
             logger.warning(f"Failed to check bundle existence: {e}")
 
-        # 3. Bundle not found locally or on server - upload needed
+        # Bundle not found on server - upload needed
         logger.info(f"Bundle {bundle_sha[:8]}... needs to be uploaded")
         return bundle_sha, True  # Upload needed
 
@@ -174,74 +164,15 @@
 
     def remote(self, env: "SyncEnv", *args, **kwargs) -> float:
         """Remote execution of the verifier function with SHA-based bundle caching."""
-        # Async verifiers are now supported by the backend
-        # if self._is_async:
-        #     raise NotImplementedError(
-        #         f"Async verifier '{self.key}' cannot be executed remotely. "
-        #         "The remote execution environment only supports synchronous functions. "
-        #         "Please provide a synchronous version of your verifier."
-        #     )
-
-        args_array = list(args)
-        args_array.append({"env": env.instance_id})
-        args = tuple(args_array)
-
-        try:
-            # Check if bundle needs to be uploaded
-            bundle_sha, needs_upload = self._check_bundle_status(env)
-
-            if needs_upload:
-                # Need to upload bundle to S3
-                logger.info(f"Uploading bundle {bundle_sha[:8]}... for {self.key}")
-                bundle_data, _ = self._get_or_create_bundle()
-
-                response = env.execute_verifier_remote(
-                    bundle_data=bundle_data,
-                    bundle_sha=bundle_sha,
-                    key=self.key,
-                    function_name=self.func.__name__,
-                    args=args,
-                    args_array=args_array,
-                    kwargs=kwargs,
-                    needs_upload=True,
-                )
+        response = self.remote_with_response(env, *args, **kwargs)
 
-                # Mark as uploaded after successful execution
-                _uploaded_bundle_shas.add(bundle_sha)
-                logger.debug(f"Registered bundle {bundle_sha[:8]}... as uploaded")
-
-            else:
-                # Bundle already available - execute without upload
-                logger.info(
-                    f"Executing cached bundle {bundle_sha[:8]}... for {self.key}"
-                )
-                bundle_data, _ = self._get_or_create_bundle()
-
-                response = env.execute_verifier_remote(
-                    bundle_data=bundle_data or b"",  # Empty if using server-side bundle
-                    bundle_sha=bundle_sha,
-                    key=self.key,
-                    function_name=self.func.__name__,
-                    args=args,
-                    args_array=args_array,
-                    kwargs=kwargs,
-                    needs_upload=False,  # Don't upload, just execute
-                )
-
-            # Handle response
-            if response.stdout:
-                print(response.stdout)
-            if response.success:
-                return self._process_result(response.result)
-            else:
-                self._raise_remote_error(response.error)
-
-        except Exception as e:
-            logger.error(f"Remote execution failed for {self.key}: {e}")
-            # If it's an HTTP error, try to get more details
-            if hasattr(e, "response") and hasattr(e.response, "text"):
-                logger.error(f"Server response: {e.response.text}")
-            raise
+        # Handle response
+        if response.stdout:
+            print(response.stdout)
+        if response.success:
+            return self._process_result(response.result)
+        else:
+            self._raise_remote_error(response.error)
 
     def _process_result(self, result: Any) -> float:
         """Process remote execution result, handling different return types."""
@@ -281,7 +212,7 @@ Remote traceback:
             try:
                 exception_class = getattr(__builtins__, error_type, RuntimeError)
                 raise exception_class(full_message)
-            except:
+            except Exception:
                 raise RuntimeError(full_message)
 
     def _get_env_id(self, env: "SyncEnv") -> str:
@@ -304,6 +235,74 @@ Remote traceback:
             or "not found" in error_msg
         )
 
+    def remote_with_response(
+        self, env: "SyncEnv", *args, **kwargs
+    ) -> "VerifiersExecuteResponse":
+        """Remote execution of the verifier function that returns the full response model."""
+        args_array = list(args)
+        args_array.append({"env": env.instance_id})
+        args = tuple(args_array)
+
+        try:
+            # Check if bundle needs to be uploaded
+            bundle_sha, needs_upload = self._check_bundle_status(env)
+
+            if needs_upload:
+                # Need to upload bundle to S3
+                logger.info(f"Uploading bundle {bundle_sha[:8]}... for {self.key}")
+                bundle_data, _ = self._get_or_create_bundle()
+
+                response = env.execute_verifier_remote(
+                    bundle_data=bundle_data,
+                    bundle_sha=bundle_sha,
+                    key=self.key,
+                    function_name=self.func.__name__,
+                    args=args,
+                    args_array=args_array,
+                    kwargs=kwargs,
+                    needs_upload=True,
+                )
+
+                logger.debug(f"Bundle {bundle_sha[:8]}... uploaded successfully")
+                return response
+
+            else:
+                # Bundle already available - execute without upload
+                logger.info(f"Bundle {bundle_sha[:8]}... already cached for {self.key}")
+                response = env.execute_verifier_remote(
+                    bundle_data=b"",  # Empty bundle since it's cached
+                    bundle_sha=bundle_sha,
+                    key=self.key,
+                    function_name=self.func.__name__,
+                    args=args,
+                    args_array=args_array,
+                    kwargs=kwargs,
+                    needs_upload=False,
+                )
+                return response
+
+        except Exception as e:
+            # Check if error indicates bundle not found and retry with upload
+            if self._is_bundle_not_found_error(e) and not needs_upload:
+                logger.info(
+                    f"Bundle {bundle_sha[:8]}... not found on server, uploading..."
+                )
+                bundle_data, _ = self._get_or_create_bundle()
+                response = env.execute_verifier_remote(
+                    bundle_data=bundle_data,
+                    bundle_sha=bundle_sha,
+                    key=self.key,
+                    function_name=self.func.__name__,
+                    args=args,
+                    args_array=args_array,
+                    kwargs=kwargs,
+                    needs_upload=True,
+                )
+                return response
+            else:
+                logger.error(f"Error in remote execution of {self.key}: {e}")
+                raise
+
 
 def verifier(
     key: Optional[str] = None,
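
As in the async module, the process-local `_uploaded_bundle_shas` cache is gone: every call asks the server whether the bundle SHA exists, and a bundle-not-found error triggers one retry with `needs_upload=True`. A distilled sketch of the upload decision under those assumptions (only `check_bundle_exists` and its `.success` flag are taken from the diff; the helper itself is illustrative):

# Illustrative helper; not part of the library.
def needs_upload(env, bundle_sha: str) -> bool:
    """Return True when the bundle bytes must accompany the execute call."""
    try:
        if env.check_bundle_exists(bundle_sha).success:
            return False  # server already has it; send an empty bundle
    except Exception:
        pass  # existence check failed; fall through and upload anyway
    return True  # unknown on the server, attach the bundle bytes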
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fleet-python
-Version: 0.2.45
+Version: 0.2.47
 Summary: Python SDK for Fleet environments
 Author-email: Fleet AI <nic@fleet.so>
 License: Apache-2.0
@@ -21,20 +21,20 @@ examples/quickstart.py,sha256=1VT39IRRhemsJgxi0O0gprdpcw7HB4pYO97GAYagIcg,3788
 examples/test_cdp_logging.py,sha256=AkCwQCgOTQEI8w3v0knWK_4eXMph7L9x07wj9yIYM10,2836
 fleet/__init__.py,sha256=Mkdeh45N47lnSv73Eehj92cGU-AImUitvDWJLFhEp0Y,3844
 fleet/base.py,sha256=bc-340sTpq_DJs7yQ9d2pDWnmJFmA1SwDB9Lagvqtb4,9182
-fleet/client.py,sha256=TOSHgaYUduK6g0vrmVnbisdJ5EPH8bNQXYSZrzgjgv0,27038
+fleet/client.py,sha256=Nk7XQS1-sVMkRWh99ZziHVEQ3ta67901oQd6tNZGi8I,27131
 fleet/config.py,sha256=uY02ZKxVoXqVDta-0IMWaYJeE1CTXF_fA9NI6QUutmU,319
 fleet/exceptions.py,sha256=fUmPwWhnT8SR97lYsRq0kLHQHKtSh2eJS0VQ2caSzEI,5055
 fleet/global_client.py,sha256=frrDAFNM2ywN0JHLtlm9qbE1dQpnQJsavJpb7xSR_bU,1072
-fleet/models.py,sha256=WAiRXa68aXSVbCqmQMn36n9cSlls6YsicV6BbyoeiYQ,12750
-fleet/tasks.py,sha256=3VKk3g1gXXhy-N9QnDq2eTdNK9rJENn_N6_TKZU-uvY,9634
+fleet/models.py,sha256=_Gy2wyHM5eUYP3-nOuxBUNTxI8AtteKX0_-jcVtS-os,12988
+fleet/tasks.py,sha256=1QLbjejJLZc5rkwpHBh3TS4X10Gc3t1pZIZCs005_3Q,12024
 fleet/types.py,sha256=L4Y82xICf1tzyCLqhLYUgEoaIIS5h9T05TyFNHSWs3s,652
 fleet/_async/__init__.py,sha256=7C_JaEHoqZ4cddsCmlJ4z-UaU6Kr2CBZSgwx5B6fqnc,6765
 fleet/_async/base.py,sha256=oisVTQsx0M_yTmyQJc3oij63uKZ97MHz-xYFsWXxQE8,9202
 fleet/_async/client.py,sha256=ub6YGR8tBIEob3z5u8F0x0fktUYiVet87RN8iqJiY2I,27306
 fleet/_async/exceptions.py,sha256=fUmPwWhnT8SR97lYsRq0kLHQHKtSh2eJS0VQ2caSzEI,5055
 fleet/_async/global_client.py,sha256=4WskpLHbsDEgWW7hXMD09W-brkp4euy8w2ZJ88594rQ,1103
-fleet/_async/models.py,sha256=li5Cii7ASUHCFMFeJIMklyicYczqPez768RxO0Q0F2o,12618
-fleet/_async/tasks.py,sha256=TAS7IkSsS8rTW34blTaEPuWzuBqe8NezLpz1ozjP82M,9416
+fleet/_async/models.py,sha256=GX-sRciZDenW2O7Qx9w_ftOkJyE4ph1-92WMq6lynHE,12856
+fleet/_async/tasks.py,sha256=968pI3Ce2Kl83SJO6YeSA7SKVKSl9lRAd7TN1bACoXA,11906
 fleet/_async/env/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fleet/_async/env/client.py,sha256=8dS42VvSgdqfuh96l6cyiLZlKElilmfTeRSZ4LZnFuE,1143
 fleet/_async/instance/__init__.py,sha256=PtmJq8J8bh0SOQ2V55QURz5GJfobozwtQoqhaOk3_tI,515
@@ -47,7 +47,7 @@ fleet/_async/resources/mcp.py,sha256=TLEsLiFhfVfZFs0Fu_uDPm-h4FPdvqgQblYqs-PTHhc
 fleet/_async/resources/sqlite.py,sha256=0B6mI8Ad4-pxITMxlBST5RFcG6kqAqwnHW9PPAcpVLk,26185
 fleet/_async/verifiers/__init__.py,sha256=1WTlCNq4tIFbbXaQu5Bf2WppZq0A8suhtZbxMTSOwxI,465
 fleet/_async/verifiers/bundler.py,sha256=Sq0KkqEhM5Ng2x8R6Z4puXvQ8FMlEO7D3-ldBLktPi4,26205
-fleet/_async/verifiers/verifier.py,sha256=pfJFuQ4WsA71AowEESd-sWQUjCVWmG5PX2-GAxDm2-8,14350
+fleet/_async/verifiers/verifier.py,sha256=IiHX028s6ux0kb2FR0Z5zJangl_IDh6cemXsUN2ktUU,14152
 fleet/env/__init__.py,sha256=cS9zCYobM5jypppDMZIQMYd6hOg5f4sgqRXEQ67pckk,676
 fleet/env/client.py,sha256=imF47xJG4JeihcZw4Y-_fXz4XxS-OgIkzUK-TLjpeJY,977
 fleet/instance/__init__.py,sha256=CyWUkbGAK-DBPw4DC4AnCW-MqqheGhZMA5QSRVu-ws4,479
@@ -66,11 +66,11 @@ fleet/verifiers/db.py,sha256=LAh1HambBInH_D9q9E2Z41YNkCOI9JJfpWPFqztjpfQ,27922
 fleet/verifiers/decorator.py,sha256=nAP3O8szXu7md_kpwpz91hGSUNEVLYjwZQZTkQlV1DM,3260
 fleet/verifiers/parse.py,sha256=IaROVGmtmilsHQp2sMoJUJcB7tATLLsoHdF0TTWcoC0,8541
 fleet/verifiers/sql_differ.py,sha256=TqTLWyK3uOyLbitT6HYzYEzuSFC39wcyhgk3rcm__k8,6525
-fleet/verifiers/verifier.py,sha256=WViDlhEU9OEwWBjp7fvIW0-HltpuFcuXfzGazQnJUuw,14380
-fleet_python-0.2.45.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+fleet/verifiers/verifier.py,sha256=_lcxXVm8e0xRrK2gNJy9up7pW1zOkPRY5n5lQ85S8jg,14197
+fleet_python-0.2.47.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
 scripts/fix_sync_imports.py,sha256=X9fWLTpiPGkSHsjyQUDepOJkxOqw1DPj7nd8wFlFqLQ,8368
 scripts/unasync.py,sha256=vWVQxRWX8SRZO5cmzEhpvnG_REhCWXpidIGIpWmEcvI,696
-fleet_python-0.2.45.dist-info/METADATA,sha256=po9K7F5ILTQzLIR95WWM1l7seIpd68Mby99ggmdkgk0,3304
-fleet_python-0.2.45.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-fleet_python-0.2.45.dist-info/top_level.txt,sha256=_3DSmTohvSDf3AIP_BYfGzhwO1ECFwuzg83X-wHCx3Y,23
-fleet_python-0.2.45.dist-info/RECORD,,
+fleet_python-0.2.47.dist-info/METADATA,sha256=QLRHeDWIHUFE9aJ4wtRSImYpKou3AnoTJdd871U1mlg,3304
+fleet_python-0.2.47.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+fleet_python-0.2.47.dist-info/top_level.txt,sha256=_3DSmTohvSDf3AIP_BYfGzhwO1ECFwuzg83X-wHCx3Y,23
+fleet_python-0.2.47.dist-info/RECORD,,