fleet-python 0.2.62__tar.gz → 0.2.64__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of fleet-python might be problematic.
- {fleet_python-0.2.62/fleet_python.egg-info → fleet_python-0.2.64}/PKG-INFO +1 -1
- fleet_python-0.2.64/examples/import_tasks.py +313 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/__init__.py +11 -1
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/client.py +2 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/env/client.py +2 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/models.py +1 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/tasks.py +16 -3
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/client.py +2 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/env/client.py +2 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/models.py +1 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/tasks.py +18 -3
- {fleet_python-0.2.62 → fleet_python-0.2.64/fleet_python.egg-info}/PKG-INFO +1 -1
- {fleet_python-0.2.62 → fleet_python-0.2.64}/pyproject.toml +1 -1
- fleet_python-0.2.62/examples/import_tasks.py +0 -102
- {fleet_python-0.2.62 → fleet_python-0.2.64}/LICENSE +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/README.md +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/diff_example.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/dsl_example.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/example.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/exampleResume.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/example_account.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/example_action_log.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/example_client.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/example_mcp_anthropic.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/example_mcp_openai.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/example_sync.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/example_task.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/example_tasks.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/example_verifier.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/export_tasks.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/gemini_example.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/json_tasks_example.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/nova_act_example.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/openai_example.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/openai_simple_example.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/query_builder_example.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/quickstart.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/examples/test_cdp_logging.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/__init__.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/base.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/env/__init__.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/exceptions.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/global_client.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/instance/__init__.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/instance/base.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/instance/client.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/resources/__init__.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/resources/base.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/resources/browser.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/resources/mcp.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/resources/sqlite.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/verifiers/__init__.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/verifiers/bundler.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/verifiers/verifier.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/base.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/config.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/env/__init__.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/exceptions.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/global_client.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/instance/__init__.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/instance/base.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/instance/client.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/instance/models.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/resources/__init__.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/resources/base.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/resources/browser.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/resources/mcp.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/resources/sqlite.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/types.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/verifiers/__init__.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/verifiers/bundler.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/verifiers/code.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/verifiers/db.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/verifiers/decorator.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/verifiers/parse.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/verifiers/sql_differ.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/verifiers/verifier.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet_python.egg-info/SOURCES.txt +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet_python.egg-info/dependency_links.txt +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet_python.egg-info/requires.txt +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/fleet_python.egg-info/top_level.txt +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/scripts/fix_sync_imports.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/scripts/unasync.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/setup.cfg +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/tests/__init__.py +0 -0
- {fleet_python-0.2.62 → fleet_python-0.2.64}/tests/test_verifier_from_string.py +0 -0

fleet_python-0.2.64/examples/import_tasks.py
@@ -0,0 +1,313 @@
+import asyncio
+import argparse
+import json
+import sys
+from collections import defaultdict
+from typing import Dict, List, Tuple
+import fleet
+from fleet._async.tasks import Task
+from dotenv import load_dotenv
+
+load_dotenv()
+
+
+async def run_verifier_sanity_check(
+    tasks: List[Task],
+    client: fleet.AsyncFleet,
+) -> Tuple[bool, Dict[str, str]]:
+    """
+    Run sanity check by spinning up instances and running verifiers.
+
+    Args:
+        tasks: List of Task objects to verify
+        client: AsyncFleet client instance
+
+    Returns:
+        Tuple of (all_passed, error_dict) where error_dict maps task_key to error message
+    """
+    print("\n" + "=" * 60)
+    print("Running verifier sanity check...")
+    print("=" * 60)
+
+    # Group tasks by env_key×env_version×data_key×data_version
+    instance_groups = defaultdict(list)
+    for task in tasks:
+        # Build the instance key
+        env_key = task.env_id or ""
+        env_version = task.version or ""
+        data_key = task.data_id or ""
+        data_version = task.data_version or ""
+
+        instance_key = f"{env_key}×{env_version}×{data_key}×{data_version}"
+        instance_groups[instance_key].append(task)
+
+    print(f"\nFound {len(instance_groups)} unique environment/data combinations:")
+    for instance_key, group_tasks in instance_groups.items():
+        print(f" {instance_key}: {len(group_tasks)} task(s)")
+
+    # Create all instances in parallel
+    print(f"\nCreating {len(instance_groups)} instance(s) in parallel...")
+    instance_map = {}
+
+    async def create_instance(instance_key: str) -> Tuple[str, object]:
+        """Create a single instance."""
+        try:
+            env_key, env_version, data_key, data_version = instance_key.split("×")
+
+            # Build env_key_str and data_key_str
+            if env_version:
+                env_key_str = f"{env_key}:{env_version}"
+            else:
+                env_key_str = env_key
+
+            if data_key and data_version:
+                data_key_str = f"{data_key}:{data_version}"
+            elif data_key:
+                data_key_str = data_key
+            else:
+                data_key_str = None
+
+            print(
+                f" Creating instance: {env_key_str}"
+                + (f" with data {data_key_str}" if data_key_str else "")
+            )
+            env = await client.make(env_key=env_key_str, data_key=data_key_str)
+            return instance_key, env
+        except Exception as e:
+            print(f" ✗ Failed to create instance for {instance_key}: {e}")
+            return instance_key, None
+
+    # Create instances concurrently
+    instance_results = await asyncio.gather(
+        *[create_instance(key) for key in instance_groups.keys()],
+        return_exceptions=True,
+    )
+
+    for result in instance_results:
+        if isinstance(result, Exception):
+            print(f" ✗ Exception creating instance: {result}")
+            return False, {"__instance_creation__": str(result)}
+        instance_key, env = result
+        if env is None:
+            return False, {instance_key: "Failed to create instance"}
+        instance_map[instance_key] = env
+
+    print(f"✓ Created {len(instance_map)} instance(s)")
+
+    # Run all verifiers in parallel with concurrency limit
+    max_concurrent_verifiers = 5  # Limit concurrent verifier executions
+    print(
+        f"\nRunning {len(tasks)} verifier(s) in parallel (max {max_concurrent_verifiers} concurrent)..."
+    )
+    errors = {}
+    semaphore = asyncio.Semaphore(max_concurrent_verifiers)
+
+    async def run_single_verifier(task, instance_key: str) -> Tuple[str, bool, str]:
+        """Run a single verifier and return (task_key, success, error_message)."""
+        async with semaphore:
+            try:
+                env = instance_map[instance_key]
+                task_key = task.key
+
+                # Run the verifier
+                if task.verifier is None:
+                    return task_key, False, "No verifier found"
+
+                result = await task.verify_async(env)
+
+                # For sanity check: we expect verifiers to return 0.0 (TASK_FAILED_SCORE)
+                # since we're running on fresh instances with no task completion.
+                # This confirms the verifier runs without errors.
+                if isinstance(result, float):
+                    if result == 0.0:
+                        print(f" ✓ {task_key}: {result:.2f} (correctly returns 0.0)")
+                        return task_key, True, ""
+                    else:
+                        print(
+                            f" ⚠ {task_key}: {result:.2f} (expected 0.0 on fresh instance)"
+                        )
+                        return (
+                            task_key,
+                            False,
+                            f"Expected 0.0 but got {result:.2f} on fresh instance",
+                        )
+                else:
+                    # Non-float result - verifier ran but didn't return expected type
+                    print(f" ⚠ {task_key}: {result} (expected float 0.0)")
+                    return (
+                        task_key,
+                        False,
+                        f"Expected float 0.0 but got {type(result).__name__}: {result}",
+                    )
+
+            except Exception as e:
+                task_key = task.key
+                error_msg = f"{type(e).__name__}: {str(e)}"
+                print(f" ✗ {task_key}: {error_msg}")
+                return task_key, False, error_msg
+
+    # Run verifiers concurrently with semaphore
+    verifier_results = await asyncio.gather(
+        *[
+            run_single_verifier(task, instance_key)
+            for instance_key, group_tasks in instance_groups.items()
+            for task in group_tasks
+        ],
+        return_exceptions=True,
+    )
+
+    # Process results
+    for result in verifier_results:
+        if isinstance(result, Exception):
+            print(f" ✗ Exception running verifier: {result}")
+            errors["__verifier_exception__"] = str(result)
+        else:
+            task_key, success, error_msg = result
+            if not success:
+                errors[task_key] = error_msg
+
+    # Clean up instances
+    print(f"\nCleaning up {len(instance_map)} instance(s)...")
+    cleanup_tasks = [env.close() for env in instance_map.values()]
+    await asyncio.gather(*cleanup_tasks, return_exceptions=True)
+    print("✓ Cleanup complete")
+
+    # Summary
+    passed_count = len(tasks) - len(errors)
+    print("\n" + "=" * 60)
+    print(f"Sanity check complete: {passed_count}/{len(tasks)} passed")
+
+    if errors:
+        print(f"\n✗ {len(errors)} verifier(s) failed:")
+        for task_key, error_msg in list(errors.items())[:10]:
+            print(f" - {task_key}: {error_msg}")
+        if len(errors) > 10:
+            print(f" ... and {len(errors) - 10} more")
+        print("\nFix the verifiers and try again.")
+        return False, errors
+    else:
+        print("✓ All verifiers passed!")
+        return True, {}
+
+
+async def main():
+    parser = argparse.ArgumentParser(description="Import tasks from a JSON file")
+    parser.add_argument("json_file", help="Path to the JSON file containing tasks")
+    parser.add_argument(
+        "--project-key",
+        "-p",
+        help="Optional project key to associate with the tasks",
+        default=None,
+    )
+    parser.add_argument(
+        "--yes",
+        "-y",
+        action="store_true",
+        help="Skip confirmation prompt and import automatically",
+    )
+    parser.add_argument(
+        "--skip-sanity-check",
+        action="store_true",
+        help="Skip the verifier sanity check (not recommended)",
+    )
+
+    args = parser.parse_args()
+
+    # Load and parse the JSON file
+    try:
+        with open(args.json_file, "r", encoding="utf-8") as f:
+            tasks_data = json.load(f)
+    except FileNotFoundError:
+        print(f"Error: File '{args.json_file}' not found")
+        sys.exit(1)
+    except json.JSONDecodeError as e:
+        print(f"Error: Invalid JSON in '{args.json_file}': {e}")
+        sys.exit(1)
+
+    # Extract task information and validate verifier_func
+    task_count = len(tasks_data)
+    task_keys = []
+    missing_verifier = []
+    for task_data in tasks_data:
+        task_key = task_data.get("key") or task_data.get("id")
+        if task_key:
+            task_keys.append(task_key)
+        else:
+            task_keys.append("(no key)")
+
+        # Check for verifier_func
+        verifier_code = task_data.get("verifier_func") or task_data.get("verifier_code")
+        if not verifier_code:
+            missing_verifier.append(task_key or "(no key)")
+
+    # Validate all tasks have verifier_func
+    if missing_verifier:
+        print(f"✗ Error: {len(missing_verifier)} task(s) missing verifier_func:")
+        for key in missing_verifier[:10]:  # Show first 10
+            print(f" - {key}")
+        if len(missing_verifier) > 10:
+            print(f" ... and {len(missing_verifier) - 10} more")
+        print("\nAll tasks must have a verifier_func to be imported.")
+        sys.exit(1)
+
+    # Get account info
+    account = await fleet.env.account_async()
+
+    # Print summary
+    print(f"Importing to team: {account.team_name}")
+    print(f"\nFound {task_count} task(s) in '{args.json_file}':")
+    print("\nTask keys:")
+    for i, key in enumerate(task_keys, 1):
+        print(f" {i}. {key}")
+
+    if args.project_key:
+        print(f"\nProject key: {args.project_key}")
+    else:
+        print("\nProject key: (none)")
+
+    # Load tasks as Task objects
+    client = fleet.AsyncFleet()
+    tasks = []
+    print("\nLoading tasks...")
+    for task_data in tasks_data:
+        try:
+            task = await client.load_task_from_json(
+                task_data, raise_on_verifier_error=True
+            )
+            tasks.append(task)
+        except Exception as e:
+            task_key = task_data.get("key") or task_data.get("id", "unknown")
+            print(f"✗ Failed to load task {task_key}: {e}")
+            sys.exit(1)
+    print(f"✓ Loaded {len(tasks)} tasks")
+
+    # Run sanity check (unless skipped)
+    if not args.skip_sanity_check:
+        success, errors = await run_verifier_sanity_check(tasks, client)
+        if not success:
+            sys.exit(1)
+    else:
+        print("\n⚠️ Skipping sanity check (--skip-sanity-check)")
+
+    # Confirmation prompt (unless --yes flag is provided)
+    if not args.yes:
+        print("\n" + "=" * 60)
+        response = input("Type 'YES' to proceed with import: ")
+        if response != "YES":
+            print("Import cancelled.")
+            sys.exit(0)
+
+    # Import tasks
+    print("\nImporting tasks...")
+    try:
+        results = await fleet.import_tasks_async(
+            args.json_file, project_key=args.project_key
+        )
+        print(f"\n✓ Successfully imported {len(results)} task(s)")
+    except Exception as e:
+        print(f"\n✗ Error importing tasks: {e}")
+        sys.exit(1)
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
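
For orientation, a minimal sketch of the JSON input this script consumes follows; it is not taken from the package. Only the "key" and "verifier_func" fields are read directly by the script above, and the prompt text, file name, and CLI flags in the trailing comment are illustrative.

import json

tasks = [
    {
        "key": "example-task-1",  # hypothetical task key
        "prompt": "Mark the first invoice as paid.",  # hypothetical field; load_task_from_json defines the real schema
        "verifier_func": (
            "def verify(env) -> float:\n"
            "    return 0.0  # fresh instance: the sanity check expects 0.0\n"
        ),
    }
]

with open("tasks.json", "w", encoding="utf-8") as f:
    json.dump(tasks, f, indent=2)

# Then, for example:
#   python import_tasks.py tasks.json --project-key my-project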

{fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/__init__.py
@@ -112,8 +112,11 @@ async def environment(env_key: str) -> Environment:
 
 async def make(
     env_key: str,
+    data_key: Optional[str] = None,
     region: Optional[str] = None,
     env_variables: Optional[Dict[str, Any]] = None,
+    image_type: Optional[str] = None,
+    ttl_seconds: Optional[int] = None,
 ) -> AsyncEnv:
     """Create a new environment instance.
 
@@ -121,7 +124,14 @@ async def make(
         env = await fleet.make("fira")
         env_with_vars = await fleet.make("fira", env_variables={"LOGGED_IN_NAME": "Alice"})
     """
-    return await _async_global_client.get_client().make(
+    return await _async_global_client.get_client().make(
+        env_key,
+        data_key=data_key,
+        region=region,
+        env_variables=env_variables,
+        image_type=image_type,
+        ttl_seconds=ttl_seconds,
+    )
 
 
 async def make_for_task(task: Task) -> AsyncEnv:
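
The widened module-level make() above now forwards data_key, image_type, and ttl_seconds to the global client. A hedged usage sketch: the "fira" key comes from the docstring above, while the data_key value and the 600-second TTL are illustrative, not values taken from the package.

import asyncio
import fleet

async def demo():
    env = await fleet.make(
        "fira",
        data_key="my-dataset:v1",  # hypothetical data snapshot key
        ttl_seconds=600,           # new in 0.2.64: instance TTL in seconds
    )
    try:
        print(env)
    finally:
        await env.close()  # close() mirrors the cleanup used in import_tasks.py above

asyncio.run(demo())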

{fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/client.py
@@ -211,6 +211,7 @@ class AsyncFleet:
         region: Optional[str] = None,
         env_variables: Optional[Dict[str, Any]] = None,
         image_type: Optional[str] = None,
+        ttl_seconds: Optional[int] = None,
     ) -> AsyncEnv:
         if ":" in env_key:
             env_key_part, env_version = env_key.split(":", 1)
@@ -245,6 +246,7 @@ class AsyncFleet:
             env_variables=env_variables,
             image_type=image_type,
             created_from="sdk",
+            ttl_seconds=ttl_seconds,
         )
 
         # Only use region-specific base URL if no custom base URL is set

{fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/env/client.py
@@ -9,6 +9,7 @@ async def make_async(
     region: Optional[str] = None,
     env_variables: Optional[Dict[str, Any]] = None,
     image_type: Optional[str] = None,
+    ttl_seconds: Optional[int] = None,
 ) -> AsyncEnv:
     return await AsyncFleet().make(
         env_key,
@@ -16,6 +17,7 @@ async def make_async(
         region=region,
         env_variables=env_variables,
         image_type=image_type,
+        ttl_seconds=ttl_seconds,
     )
 
 

{fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/models.py
@@ -66,6 +66,7 @@ class InstanceRequest(BaseModel):
     force_pull: Optional[bool] = Field(None, title="Force Pull")
     env_variables: Optional[Dict[str, Any]] = Field(None, title="Env Variables")
     created_from: Optional[str] = Field(None, title="Created From")
+    ttl_seconds: Optional[int] = Field(None, title="TTL Seconds")
 
 
 class InstanceStatus(Enum):

{fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/_async/tasks.py
@@ -222,7 +222,10 @@ class Task(BaseModel):
         return await AsyncFleet().make(env_key=self.env_key, region=region)
 
     async def make(
-        self,
+        self,
+        region: Optional[str] = None,
+        image_type: Optional[str] = None,
+        ttl_seconds: Optional[int] = None,
     ):
         """Create an environment instance with task's configuration.
 
@@ -234,6 +237,7 @@ class Task(BaseModel):
         Args:
             region: Optional AWS region for the environment
            image_type: Optional image type for the environment
+            ttl_seconds: Optional TTL in seconds for the instance
 
         Returns:
             Environment instance configured for this task
@@ -255,6 +259,7 @@ class Task(BaseModel):
             region=region,
             env_variables=self.env_variables if self.env_variables else None,
             image_type=image_type,
+            ttl_seconds=ttl_seconds,
         )
 
 
@@ -274,10 +279,18 @@ def verifier_from_string(
     """
     try:
         import inspect
+        import re
         from .verifiers.verifier import AsyncVerifierFunction
         from fleet.verifiers.code import TASK_SUCCESSFUL_SCORE, TASK_FAILED_SCORE
         from fleet.verifiers.db import IgnoreConfig
 
+        # Strip @verifier decorator if present to avoid double-wrapping
+        # Remove lines like: @verifier(key="...")
+        cleaned_code = re.sub(r'@verifier\([^)]*\)\s*\n', '', verifier_func)
+        # Also remove the verifier import if present
+        cleaned_code = re.sub(r'from fleet import.*verifier.*\n', '', cleaned_code)
+        cleaned_code = re.sub(r'import.*verifier.*\n', '', cleaned_code)
+
         # Create a local namespace for executing the code
         local_namespace = {
             "TASK_SUCCESSFUL_SCORE": TASK_SUCCESSFUL_SCORE,
@@ -286,8 +299,8 @@ def verifier_from_string(
             "Environment": object,  # Add Environment type if needed
         }
 
-        # Execute the verifier code in the namespace
-        exec(
+        # Execute the cleaned verifier code in the namespace
+        exec(cleaned_code, globals(), local_namespace)
 
         # Find the function that was defined (not imported)
         # Functions defined via exec have co_filename == '<string>'
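
The verifier_from_string() change above strips any @verifier(...) decorator and verifier imports from the stored source before exec(), so the function is not wrapped twice. Below is a standalone sketch of just that stripping step, using the same regexes from the diff; the sample verifier source is made up.

import re

verifier_func = (
    "from fleet import verifier\n"
    "\n"
    '@verifier(key="sample_check")\n'
    "def sample_check(env) -> float:\n"
    "    return 0.0\n"
)

# Same three substitutions as in the diff above
cleaned_code = re.sub(r'@verifier\([^)]*\)\s*\n', '', verifier_func)
cleaned_code = re.sub(r'from fleet import.*verifier.*\n', '', cleaned_code)
cleaned_code = re.sub(r'import.*verifier.*\n', '', cleaned_code)

print(cleaned_code)  # leaves only the bare "def sample_check..." definition for exec()

Note that the third pattern is broad: any single line containing both "import" and "verifier" would also be removed.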

{fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/client.py
@@ -211,6 +211,7 @@ class Fleet:
         region: Optional[str] = None,
         env_variables: Optional[Dict[str, Any]] = None,
         image_type: Optional[str] = None,
+        ttl_seconds: Optional[int] = None,
     ) -> SyncEnv:
         if ":" in env_key:
             env_key_part, env_version = env_key.split(":", 1)
@@ -245,6 +246,7 @@ class Fleet:
             env_variables=env_variables,
             image_type=image_type,
             created_from="sdk",
+            ttl_seconds=ttl_seconds,
         )
 
         # Only use region-specific base URL if no custom base URL is set
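
The synchronous Fleet.make() gains the same ttl_seconds pass-through. A hedged sketch; the env key reuses "fira" from the async docstring, the TTL value is illustrative, and the close() call assumes SyncEnv mirrors the async environment's cleanup method.

from fleet.client import Fleet

env = Fleet().make("fira", ttl_seconds=900)  # TTL forwarded to the instance request per the diffs above
try:
    print(env)
finally:
    env.close()  # assumed sync counterpart of the async close() used in import_tasks.py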

{fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/env/client.py
@@ -9,6 +9,7 @@ def make(
     region: Optional[str] = None,
     env_variables: Optional[Dict[str, Any]] = None,
     image_type: Optional[str] = None,
+    ttl_seconds: Optional[int] = None,
 ) -> SyncEnv:
     return Fleet().make(
         env_key,
@@ -16,6 +17,7 @@ def make(
         region=region,
         env_variables=env_variables,
         image_type=image_type,
+        ttl_seconds=ttl_seconds,
     )
 
 

{fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/models.py
@@ -69,6 +69,7 @@ class InstanceRequest(BaseModel):
     env_variables: Optional[Dict[str, Any]] = Field(None, title="Env Variables")
     image_type: Optional[str] = Field(None, title="Image Type")
     created_from: Optional[str] = Field(None, title="Created From")
+    ttl_seconds: Optional[int] = Field(None, title="TTL Seconds")
 
 
 class InstanceStatus(Enum):

{fleet_python-0.2.62 → fleet_python-0.2.64}/fleet/tasks.py
@@ -214,7 +214,12 @@ class Task(BaseModel):
 
         return Fleet().make(env_key=self.env_key, region=region)
 
-    def make(
+    def make(
+        self,
+        region: Optional[str] = None,
+        image_type: Optional[str] = None,
+        ttl_seconds: Optional[int] = None,
+    ):
         """Create an environment instance with task's configuration.
 
         Auto-populates environment creation with:
@@ -225,6 +230,7 @@ class Task(BaseModel):
         Args:
             region: Optional AWS region for the environment
             image_type: Optional image type for the environment
+            ttl_seconds: Optional TTL in seconds for the instance
 
         Returns:
             Environment instance configured for this task
@@ -246,6 +252,7 @@ class Task(BaseModel):
             region=region,
             env_variables=self.env_variables if self.env_variables else None,
             image_type=image_type,
+            ttl_seconds=ttl_seconds,
         )
 
 
@@ -265,10 +272,18 @@ def verifier_from_string(
     """
     try:
         import inspect
+        import re
         from .verifiers import SyncVerifierFunction
         from .verifiers.code import TASK_SUCCESSFUL_SCORE, TASK_FAILED_SCORE
         from .verifiers.db import IgnoreConfig
 
+        # Strip @verifier decorator if present to avoid double-wrapping
+        # Remove lines like: @verifier(key="...")
+        cleaned_code = re.sub(r'@verifier\([^)]*\)\s*\n', '', verifier_func)
+        # Also remove the verifier import if present
+        cleaned_code = re.sub(r'from fleet import.*verifier.*\n', '', cleaned_code)
+        cleaned_code = re.sub(r'import.*verifier.*\n', '', cleaned_code)
+
         # Create a globals namespace with all required imports
         exec_globals = globals().copy()
         exec_globals.update(
@@ -283,8 +298,8 @@ def verifier_from_string(
         # Create a local namespace for executing the code
         local_namespace = {}
 
-        # Execute the verifier code in the namespace
-        exec(
+        # Execute the cleaned verifier code in the namespace
+        exec(cleaned_code, exec_globals, local_namespace)
 
         # Find the function that was defined (not imported)
         # Functions defined via exec have co_filename == '<string>'
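
On the sync side, Task.make() now accepts region, image_type, and ttl_seconds, matching the async variant. A hedged sketch; obtaining the Task object is outside this diff, and the region and TTL values are illustrative.

def provision(task):
    # task: a fleet.tasks.Task loaded elsewhere
    return task.make(
        region="us-west-2",  # optional AWS region, per the docstring above
        ttl_seconds=3600,    # new in 0.2.64: instance TTL in seconds
    )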

fleet_python-0.2.62/examples/import_tasks.py
@@ -1,102 +0,0 @@
-import asyncio
-import argparse
-import json
-import sys
-import fleet
-from dotenv import load_dotenv
-
-load_dotenv()
-
-
-async def main():
-    parser = argparse.ArgumentParser(description="Import tasks from a JSON file")
-    parser.add_argument("json_file", help="Path to the JSON file containing tasks")
-    parser.add_argument(
-        "--project-key",
-        "-p",
-        help="Optional project key to associate with the tasks",
-        default=None,
-    )
-    parser.add_argument(
-        "--yes",
-        "-y",
-        action="store_true",
-        help="Skip confirmation prompt and import automatically",
-    )
-
-    args = parser.parse_args()
-
-    # Load and parse the JSON file
-    try:
-        with open(args.json_file, "r", encoding="utf-8") as f:
-            tasks_data = json.load(f)
-    except FileNotFoundError:
-        print(f"Error: File '{args.json_file}' not found")
-        sys.exit(1)
-    except json.JSONDecodeError as e:
-        print(f"Error: Invalid JSON in '{args.json_file}': {e}")
-        sys.exit(1)
-
-    # Extract task information and validate verifier_func
-    task_count = len(tasks_data)
-    task_keys = []
-    missing_verifier = []
-    for task_data in tasks_data:
-        task_key = task_data.get("key") or task_data.get("id")
-        if task_key:
-            task_keys.append(task_key)
-        else:
-            task_keys.append("(no key)")
-
-        # Check for verifier_func
-        verifier_code = task_data.get("verifier_func") or task_data.get("verifier_code")
-        if not verifier_code:
-            missing_verifier.append(task_key or "(no key)")
-
-    # Validate all tasks have verifier_func
-    if missing_verifier:
-        print(f"✗ Error: {len(missing_verifier)} task(s) missing verifier_func:")
-        for key in missing_verifier[:10]:  # Show first 10
-            print(f" - {key}")
-        if len(missing_verifier) > 10:
-            print(f" ... and {len(missing_verifier) - 10} more")
-        print("\nAll tasks must have a verifier_func to be imported.")
-        sys.exit(1)
-
-    # Get account info
-    account = await fleet.env.account_async()
-
-    # Print summary
-    print(f"Importing to team: {account.team_name}")
-    print(f"\nFound {task_count} task(s) in '{args.json_file}':")
-    print("\nTask keys:")
-    for i, key in enumerate(task_keys, 1):
-        print(f" {i}. {key}")
-
-    if args.project_key:
-        print(f"\nProject key: {args.project_key}")
-    else:
-        print("\nProject key: (none)")
-
-    # Confirmation prompt (unless --yes flag is provided)
-    if not args.yes:
-        print("\n" + "=" * 60)
-        response = input("Type 'YES' to proceed with import: ")
-        if response != "YES":
-            print("Import cancelled.")
-            sys.exit(0)
-
-    # Import tasks
-    print("\nImporting tasks...")
-    try:
-        results = await fleet.import_tasks_async(
-            args.json_file, project_key=args.project_key
-        )
-        print(f"\n✓ Successfully imported {len(results)} task(s)")
-    except Exception as e:
-        print(f"\n✗ Error importing tasks: {e}")
-        sys.exit(1)
-
-
-if __name__ == "__main__":
-    asyncio.run(main())