fleet-python 0.2.69__py3-none-any.whl → 0.2.69b3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of fleet-python has been flagged as potentially problematic.
- fleet/_async/base.py +3 -4
- fleet/_async/client.py +194 -38
- fleet/_async/instance/client.py +19 -4
- fleet/_async/resources/sqlite.py +150 -1
- fleet/_async/tasks.py +5 -2
- fleet/_async/verifiers/bundler.py +21 -22
- fleet/_async/verifiers/verifier.py +19 -20
- fleet/base.py +3 -4
- fleet/client.py +212 -44
- fleet/instance/client.py +20 -5
- fleet/resources/sqlite.py +143 -1
- fleet/tasks.py +5 -2
- fleet/verifiers/bundler.py +21 -22
- fleet/verifiers/decorator.py +1 -1
- fleet/verifiers/verifier.py +19 -20
- {fleet_python-0.2.69.dist-info → fleet_python-0.2.69b3.dist-info}/METADATA +1 -1
- {fleet_python-0.2.69.dist-info → fleet_python-0.2.69b3.dist-info}/RECORD +24 -20
- tests/test_app_method.py +85 -0
- tests/test_instance_dispatch.py +607 -0
- tests/test_sqlite_resource_dual_mode.py +263 -0
- tests/test_sqlite_shared_memory_behavior.py +117 -0
- {fleet_python-0.2.69.dist-info → fleet_python-0.2.69b3.dist-info}/WHEEL +0 -0
- {fleet_python-0.2.69.dist-info → fleet_python-0.2.69b3.dist-info}/licenses/LICENSE +0 -0
- {fleet_python-0.2.69.dist-info → fleet_python-0.2.69b3.dist-info}/top_level.txt +0 -0
fleet/_async/verifiers/bundler.py CHANGED

```diff
@@ -37,7 +37,7 @@ class FunctionBundler:
     ) -> bytes:
         """Create a function bundle with statically extracted code."""

-
+        logger.info(f"Creating function bundle for {func.__name__}")

         # 1. Parse the main function and find dependencies
         mod_file = Path(func.__code__.co_filename)
```
```diff
@@ -115,7 +115,7 @@ class FunctionBundler:

         # Find function calls within the verifier function
         called_functions = self._extract_function_calls(main_func_ast)
-
+        logger.debug(f"Functions called in verifier: {called_functions}")

         # Find all functions defined in the module
         module_functions = {}
```
```diff
@@ -128,7 +128,7 @@ class FunctionBundler:
         for func_name in called_functions:
             if func_name in module_functions and func_name != func.__name__:
                 same_module_deps.append(func_name)
-
+                logger.debug(f"Found same-module dependency: {func_name}")

         # Separate local and external imports
         local_imports = {}
```
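The bundler hunks above restore logging around fleet's static extraction step: parse the verifier function's module, record which functions it calls, and copy same-module dependencies into the bundle. A self-contained sketch of that call-extraction idea (illustrative names only, not fleet's internal API):

```python
# Sketch only: walk a function's AST and collect the plain-name calls it
# makes, which is the raw material for finding same-module dependencies.
import ast
import inspect

def extract_function_calls(func) -> set:
    tree = ast.parse(inspect.getsource(func))
    calls = set()
    for node in ast.walk(tree):
        # Only direct calls like helper(...); obj.method(...) is skipped here
        if isinstance(node, ast.Call) and isinstance(node.func, ast.Name):
            calls.add(node.func.id)
    return calls

def helper(x):
    return x * 2

def verifier(env):
    return helper(1)

print(extract_function_calls(verifier))  # {'helper'}
```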
```diff
@@ -292,7 +292,7 @@ class FunctionBundler:
                     code = ast.unparse(node)
                     extracted_code.append(code)
                 except Exception as e:
-
+                    logger.warning(f"Could not unparse AST node: {e}")
                     # Fallback to original source extraction
                     lines = content.split("\n")
                     start_line = node.lineno - 1
```
```diff
@@ -305,11 +305,11 @@ class FunctionBundler:
                 extracted_code.append(code)

             result = "\n\n".join(extracted_code)
-
+            logger.debug(f"Extracted {len(extracted_code)} items from {file_path}")
             return result

         except Exception as e:
-
+            logger.warning(f"Failed to extract functions from {file_path}: {e}")
             # Fallback to including the entire file
             with open(file_path, "r", encoding="utf-8") as f:
                 return f.read()
```
```diff
@@ -464,14 +464,14 @@ class FunctionBundler:
                 version = dist.version  # Get the installed version
                 package_with_version = f"{package_name}=={version}"
                 packages.add(package_with_version)
-
+                logger.debug(f"Mapped {mod} -> {package_with_version}")
             except imd.PackageNotFoundError:
                 # Skip stdlib or local modules
-
+                logger.debug(f"Skipping {mod} (stdlib or local)")
                 continue

         package_list = list(packages)
-
+        logger.debug(f"Final package list: {package_list}")
         return package_list

     def _merge_requirements(
```
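The restored debug lines above ("Mapped {mod} -> ...", "Skipping {mod} (stdlib or local)") trace how imported top-level modules are pinned to installed distributions via `importlib.metadata` (aliased `imd` in the diff). A stdlib-only sketch of that mapping, assuming Python 3.10+ for `packages_distributions`:

```python
# Sketch: map imported module names to pinned "name==version" requirements;
# stdlib and local modules simply never resolve to a distribution.
import importlib.metadata as imd

def pin_packages(modules):
    packages = set()
    dist_map = imd.packages_distributions()  # module name -> [distributions]
    for mod in modules:
        for dist_name in dist_map.get(mod, []):
            try:
                packages.add(f"{dist_name}=={imd.version(dist_name)}")
            except imd.PackageNotFoundError:
                continue  # skip stdlib or local modules
    return sorted(packages)

print(pin_packages(["json", "httpx"]))  # "json" is stdlib, so it drops out
```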
```diff
@@ -511,10 +511,10 @@ class FunctionBundler:
             if pkg_name not in seen_packages:
                 final_requirements.append(req)
                 seen_packages.add(pkg_name)
-
-
-
-
+            else:
+                logger.debug(
+                    f"Skipping auto-detected {req}, using explicit version instead"
+                )

         # Always ensure fleet-python is included
         if "fleet-python" not in seen_packages:
```
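The new `else` branch above documents the merge policy: when an explicit requirement and an auto-detected pin name the same package, the explicit one wins. A minimal sketch of that dedup:

```python
# Sketch: explicit requirements are scanned first, so a later auto-detected
# pin for the same package name is skipped.
def merge_requirements(explicit, detected):
    final, seen = [], set()
    for req in explicit + detected:
        name = req.split("==")[0].strip().lower()
        if name not in seen:
            final.append(req)
            seen.add(name)
        # else: auto-detected duplicate skipped; explicit version already kept
    return final

print(merge_requirements(["httpx==0.27.0"], ["httpx==0.26.0", "pydantic==2.7.1"]))
# ['httpx==0.27.0', 'pydantic==2.7.1']
```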
```diff
@@ -565,9 +565,9 @@ class FunctionBundler:
                     )
                     if dep_src:
                         same_module_code += f"\n{dep_src}\n"
-
-
-
+                        logger.debug(
+                            f"Extracted same-module dependency: {dep_name}"
+                        )

             # Create verifier.py with the main function
             verifier_file = build_dir / "verifier.py"
```
```diff
@@ -586,7 +586,7 @@ class FunctionBundler:
 {code}
 """
                 dest_path.write_text(extracted_content)
-
+                logger.debug(f"Created extracted file: {relative_path}")

                 # Ensure __init__.py files exist
                 self._ensure_init_files(Path(relative_path), build_dir)
```
```diff
@@ -595,7 +595,7 @@ class FunctionBundler:
             return self._create_zip_bundle(build_dir)

         except Exception as e:
-
+            logger.error(f"Failed to build function bundle: {e}")
             raise RuntimeError(f"Function bundle creation failed: {e}")

     def _ensure_init_files(self, rel_path: Path, build_dir: Path):
```
```diff
@@ -607,7 +607,7 @@ class FunctionBundler:
             if not init_file.exists():
                 init_file.parent.mkdir(parents=True, exist_ok=True)
                 init_file.write_text("# Auto-generated __init__.py")
-
+                logger.debug(f"Created __init__.py: {current}")
             current = current.parent

     def _create_zip_bundle(self, build_dir: Path) -> bytes:
```
```diff
@@ -621,7 +621,7 @@ class FunctionBundler:
                 zf.write(file_path, arcname)

         bundle_size = len(zip_buffer.getvalue())
-
+        logger.debug(f"Created function bundle ({bundle_size:,} bytes)")
         return zip_buffer.getvalue()

     def _extract_function_source(
```
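`_create_zip_bundle` assembles the archive entirely in memory, which is what the restored size log reports. A minimal sketch of the same pattern over a staged build directory:

```python
# Sketch: zip a build directory into bytes without touching disk again.
import io
import zipfile
from pathlib import Path

def create_zip_bundle(build_dir: Path) -> bytes:
    zip_buffer = io.BytesIO()
    with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zf:
        for file_path in build_dir.rglob("*"):
            if file_path.is_file():
                # Arcnames are relative so the bundle unpacks cleanly
                zf.write(file_path, file_path.relative_to(build_dir).as_posix())
    data = zip_buffer.getvalue()
    print(f"Created function bundle ({len(data):,} bytes)")
    return data
```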
```diff
@@ -662,8 +662,7 @@ class FunctionBundler:
             return "\n".join(func_lines)

         except Exception as e:
-
-            pass
+            logger.warning(f"Failed to extract function {function_name}: {e}")

         return None

```
fleet/_async/verifiers/verifier.py CHANGED

```diff
@@ -79,9 +79,9 @@ class AsyncVerifierFunction:

             self._bundle_data = zip_buffer.getvalue()
             self._bundle_sha = _get_bundle_sha(self._bundle_data)
-
-
-
+            logger.debug(
+                f"Created bundle from raw code for {self.key} with SHA: {self._bundle_sha}"
+            )
         else:
             # Try to create bundle from function source
             try:
```
```diff
@@ -89,9 +89,9 @@ class AsyncVerifierFunction:
                     self.func, self.extra_requirements, self.verifier_id
                 )
                 self._bundle_sha = _get_bundle_sha(self._bundle_data)
-
-
-
+                logger.debug(
+                    f"Created bundle for {self.key} with SHA: {self._bundle_sha}"
+                )
             except OSError as e:
                 # Can't create bundle - no source and no raw code
                 raise OSError(f"Cannot create bundle for {self.key}: {e}")
```
```diff
@@ -104,21 +104,20 @@ class AsyncVerifierFunction:

         # If bundle_data is empty, we're using server-side bundle
         if not bundle_data:
-
+            logger.debug(f"Using server-side bundle {bundle_sha[:8]}...")
             return bundle_sha, False  # No upload needed, server has it

         # Always check if bundle exists on server
         try:
             exists = await env.check_bundle_exists(bundle_sha)
             if exists.success:
-
+                logger.info(f"Bundle {bundle_sha[:8]}... found on server")
                 return bundle_sha, False  # Found on server, no upload needed
         except Exception as e:
-
-            pass
+            logger.warning(f"Failed to check bundle existence: {e}")

         # Bundle not found on server - upload needed
-
+        logger.info(f"Bundle {bundle_sha[:8]}... needs to be uploaded")
         return bundle_sha, True  # Upload needed

     async def __call__(self, env: AsyncEnv, *args, **kwargs) -> float:
```
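The bundle-resolution flow above is content-addressed: the bundle's SHA-256 alone decides whether anything has to be sent. Condensed to its decision logic (a sketch; `check_bundle_exists` stands in for the env call):

```python
# Sketch of the upload decision restored above: hash the bundle, ask the
# server whether that SHA is known, and upload only on a miss.
import hashlib

def resolve_upload(bundle_data: bytes, check_bundle_exists):
    bundle_sha = hashlib.sha256(bundle_data).hexdigest()
    if not bundle_data:
        return bundle_sha, False  # server-side bundle, nothing to send
    try:
        if check_bundle_exists(bundle_sha):
            return bundle_sha, False  # found on server, no upload needed
    except Exception:
        pass  # a failed existence check is treated as a miss
    return bundle_sha, True  # upload needed
```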
```diff
@@ -148,7 +147,7 @@ class AsyncVerifierFunction:
             )

         except Exception as e:
-
+            logger.error(f"Error in verifier {self.key}: {e}")
             # Return error score 0
             return 0.0

```
```diff
@@ -180,7 +179,7 @@ class AsyncVerifierFunction:
         try:
             return float(result)
         except (ValueError, TypeError):
-
+            logger.warning(f"Could not convert result to float: {result}")
             return 0.0

     def _raise_remote_error(self, error_info: Dict[str, Any]):
```
```diff
@@ -239,7 +238,7 @@ Remote traceback:

         if needs_upload:
             # Need to upload bundle to S3
-
+            logger.info(f"Uploading bundle {bundle_sha[:8]}... for {self.key}")
             bundle_data, _ = self._get_or_create_bundle()

             response = await env.execute_verifier_remote(
```
```diff
@@ -253,11 +252,11 @@ Remote traceback:
                 needs_upload=True,
             )

-
+            logger.debug(f"Bundle {bundle_sha[:8]}... uploaded successfully")

         else:
             # Bundle already available - execute without upload
-
+            logger.info(f"Bundle {bundle_sha[:8]}... already cached for {self.key}")
             response = await env.execute_verifier_remote(
                 bundle_data=b"",  # Empty bundle since it's cached
                 bundle_sha=bundle_sha,
```
```diff
@@ -274,9 +273,9 @@ Remote traceback:
         except Exception as e:
             # Check if error indicates bundle not found and retry with upload
             if self._is_bundle_not_found_error(e) and not needs_upload:
-
-
-
+                logger.info(
+                    f"Bundle {bundle_sha[:8]}... not found on server, uploading..."
+                )
                 bundle_data, _ = self._get_or_create_bundle()
                 response = await env.execute_verifier_remote(
                     bundle_data=bundle_data,
```
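The retry branch above completes the caching protocol: execute optimistically with an empty bundle, and only when the server reports the bundle missing, upload and retry once. The shape of it, with stand-in callables:

```python
# Sketch: optimistic execute against the server-side cache, with a single
# upload-and-retry on a bundle-not-found error.
def run_with_retry(bundle_sha, get_bundle, execute_remote, is_not_found_error):
    try:
        # Server should already hold this SHA; send no bundle bytes
        return execute_remote(bundle_data=b"", bundle_sha=bundle_sha)
    except Exception as e:
        if not is_not_found_error(e):
            raise  # unrelated failure, propagate as the diff does
        # Cache miss: send the real bundle and retry once
        return execute_remote(bundle_data=get_bundle(), bundle_sha=bundle_sha)
```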
```diff
@@ -290,7 +289,7 @@ Remote traceback:
                 )
                 return response
             else:
-
+                logger.error(f"Error in remote execution of {self.key}: {e}")
                 raise

```
fleet/base.py CHANGED
```diff
@@ -48,7 +48,7 @@ class BaseWrapper:
         import logging

         logger = logging.getLogger(__name__)
-
+        logger.debug(f"Headers being sent: {headers}")
         return headers

```
```diff
@@ -93,9 +93,8 @@ class SyncWrapper(BaseWrapper):

         # Debug log 500 errors
         if status_code == 500:
-
-
-            pass
+            logger.error(f"Got 500 error from {response.url}")
+            logger.error(f"Response text: {response.text}")

         # Try to parse error response as JSON
         try:
```
fleet/client.py CHANGED
```diff
@@ -21,7 +21,8 @@ import httpx
 import json
 import logging
 import os
-from typing import List, Optional, Dict, Any, TYPE_CHECKING
+from typing import List, Optional, Dict, Any, TYPE_CHECKING, Union
+from urllib.parse import urlparse

 from .base import EnvironmentBase, SyncWrapper
 from .models import (
```
```diff
@@ -47,6 +48,11 @@ from .instance import (
     ResetResponse,
     ExecuteFunctionResponse,
 )
+from .instance.models import (
+    Resource as ResourceModel,
+    ResourceType,
+    ResourceMode,
+)
 from .config import (
     DEFAULT_MAX_RETRIES,
     DEFAULT_TIMEOUT,
```
```diff
@@ -69,6 +75,14 @@ class SyncEnv(EnvironmentBase):
         self._client = client
         self._apps: Dict[str, InstanceClient] = {}
         self._instance: Optional[InstanceClient] = None
+        self._manager_url_override: Optional[str] = None  # For URL mode
+
+    @property
+    def manager_url(self) -> str:
+        """Override to support URL mode where urls is None."""
+        if self._manager_url_override is not None:
+            return self._manager_url_override
+        return super().manager_url

     @property
     def instance(self) -> InstanceClient:
```
```diff
@@ -80,17 +94,17 @@

     def app(self, name: str) -> InstanceClient:
         if name not in self._apps:
-            # Extract
-            #
-
-            #
-
-
-
-
+            # Extract scheme://netloc from manager_url, then construct /{name}/api/v1/env
+            # Supports all URL formats:
+            #   https://host/api/v1/env -> https://host/{name}/api/v1/env
+            #   https://host/sentry/api/v1/env -> https://host/{name}/api/v1/env
+            #   http://localhost:8080/api/v1/env -> http://localhost:8080/{name}/api/v1/env
+            parsed = urlparse(self.manager_url)
+            root = f"{parsed.scheme}://{parsed.netloc}"
+            new_url = f"{root}/{name}/api/v1/env"

             self._apps[name] = InstanceClient(
-
+                new_url,
                 self._client.httpx_client if self._client else None,
             )
         return self._apps[name]
```
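The rewritten `app()` keeps only the scheme and netloc of the manager URL before grafting on `/{name}/api/v1/env`, so any existing app segment is replaced rather than nested. The comment's three cases check out standalone:

```python
# Standalone check of the URL rebasing used by app() above.
from urllib.parse import urlparse

def rebase(manager_url: str, name: str) -> str:
    parsed = urlparse(manager_url)
    return f"{parsed.scheme}://{parsed.netloc}/{name}/api/v1/env"

assert rebase("https://host/api/v1/env", "sentry") == "https://host/sentry/api/v1/env"
assert rebase("https://host/sentry/api/v1/env", "jira") == "https://host/jira/api/v1/env"
assert rebase("http://localhost:8080/api/v1/env", "app") == "http://localhost:8080/app/api/v1/env"
```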
```diff
@@ -287,11 +301,165 @@ class Fleet:
             for instance_data in response.json()
         ]

-    def instance(self, instance_id: str) -> SyncEnv:
-
-
-
-
+    def instance(self, instance_id: Union[str, Dict[str, str]]) -> SyncEnv:
+        """Create or connect to an environment instance.
+
+        Supports three modes based on input type:
+        1. dict: Local filesystem mode - {"current": "./data.db", "seed": "./seed.db"}
+        2. str starting with http:// or https://: Localhost/URL mode
+        3. str (other): Remote cloud instance mode
+
+        Args:
+            instance_id: Instance identifier (str), URL (str starting with http://),
+                or local db mapping (dict)
+
+        Returns:
+            SyncEnv: Environment instance
+        """
+        # Local filesystem mode - dict of resource names to file paths
+        if isinstance(instance_id, dict):
+            return self._create_local_instance(instance_id)
+
+        # Localhost/direct URL mode - string starting with http:// or https://
+        elif isinstance(instance_id, str) and instance_id.startswith(("http://", "https://")):
+            return self._create_url_instance(instance_id)
+
+        # Remote mode - existing behavior
+        else:
+            response = self.client.request("GET", f"/v1/env/instances/{instance_id}")
+            instance = SyncEnv(client=self.client, **response.json())
+            instance.instance.load()
+            return instance
+
+    def _create_url_instance(self, base_url: str) -> SyncEnv:
+        """Create instance connected to a direct URL (localhost or custom).
+
+        Args:
+            base_url: URL of the instance manager API
+
+        Returns:
+            SyncEnv: Environment instance configured for URL mode
+        """
+        instance_client = InstanceClient(url=base_url, httpx_client=self._httpx_client)
+
+        # Create a minimal environment for URL mode
+        env = SyncEnv(
+            client=self.client,
+            instance_id=base_url,
+            env_key="localhost",
+            version="",
+            status="running",
+            subdomain="localhost",
+            created_at="",
+            updated_at="",
+            terminated_at=None,
+            team_id="",
+            region="localhost",
+            env_variables=None,
+            data_key=None,
+            data_version=None,
+            urls=None,
+            health=None,
+        )
+        env._instance = instance_client
+        env._manager_url_override = base_url  # Set manager_url for URL mode
+        return env
+
+    @staticmethod
+    def _normalize_db_path(path: str) -> tuple[str, bool]:
+        """Normalize database path and detect if it's in-memory.
+
+        Args:
+            path: Database path - can be:
+                - File path: "./data.db"
+                - Plain memory: ":memory:"
+                - Named memory: ":memory:namespace"
+                - URI: "file:name?mode=memory&cache=shared"
+
+        Returns:
+            Tuple of (normalized_path, is_memory)
+        """
+        import uuid
+        import sqlite3
+
+        if path == ":memory:":
+            # Plain :memory: - create unique namespace
+            name = f"mem_{uuid.uuid4().hex[:8]}"
+            return f"file:{name}?mode=memory&cache=shared", True
+        elif path.startswith(":memory:"):
+            # Named memory: :memory:current -> file:current?mode=memory&cache=shared
+            namespace = path[8:]  # Remove ":memory:" prefix
+            return f"file:{namespace}?mode=memory&cache=shared", True
+        elif "mode=memory" in path:
+            # Already a proper memory URI
+            return path, True
+        else:
+            # Regular file path
+            return path, False
+
+    def _create_local_instance(self, dbs: Dict[str, str]) -> SyncEnv:
+        """Create instance with local file-based or in-memory SQLite resources.
+
+        Args:
+            dbs: Map of resource names to paths (e.g., {"current": "./data.db"} or
+                {"current": ":memory:current"})
+
+        Returns:
+            SyncEnv: Environment instance configured for local mode
+        """
+        import sqlite3
+
+        instance_client = InstanceClient(url="local://", httpx_client=None)
+        instance_client._resources = []  # Mark as loaded
+        instance_client._memory_anchors = {}  # Store anchor connections for in-memory DBs
+
+        # Store creation parameters for local SQLiteResources
+        # This allows db() to create new instances each time (matching HTTP mode behavior)
+        for name, path in dbs.items():
+            # Normalize path and detect if it's in-memory
+            normalized_path, is_memory = self._normalize_db_path(path)
+
+            # Create anchor connection for in-memory databases
+            # This keeps the database alive as long as the env exists
+            if is_memory:
+                anchor_conn = sqlite3.connect(normalized_path, uri=True)
+                instance_client._memory_anchors[name] = anchor_conn
+
+            resource_model = ResourceModel(
+                name=name,
+                type=ResourceType.db,
+                mode=ResourceMode.rw,
+                label=f"Local: {path}",
+            )
+            instance_client._resources_state[ResourceType.db.value][name] = {
+                'type': 'local',
+                'resource_model': resource_model,
+                'db_path': normalized_path,
+                'is_memory': is_memory
+            }
+
+        # Create a minimal environment for local mode
+        env = SyncEnv(
+            client=self.client,
+            instance_id="local",
+            env_key="local",
+            version="",
+            status="running",
+            subdomain="local",
+            created_at="",
+            updated_at="",
+            terminated_at=None,
+            team_id="",
+            region="local",
+            env_variables=None,
+            data_key=None,
+            data_version=None,
+            urls=None,
+            health=None,
+        )
+        env._instance = instance_client
+        env._manager_url_override = "local://"  # Set manager_url for local mode
+        return env

     def check_bundle_exists(self, bundle_hash: str) -> VerifiersCheckResponse:
         return _check_bundle_exists(self.client, bundle_hash)
```
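Two details of the local mode above deserve a closer look. First, what `_normalize_db_path` returns for each input shape; a spot-check, assuming the `Fleet` class from this diff (the plain `:memory:` case gets a random `mem_` namespace, so only its prefix is asserted):

```python
# Spot-check of the _normalize_db_path rules added above.
path, is_mem = Fleet._normalize_db_path(":memory:current")
assert (path, is_mem) == ("file:current?mode=memory&cache=shared", True)

path, is_mem = Fleet._normalize_db_path("./data.db")
assert (path, is_mem) == ("./data.db", False)

path, is_mem = Fleet._normalize_db_path(":memory:")
assert path.startswith("file:mem_") and is_mem
```

Second, the anchor connections: a shared-cache in-memory SQLite database exists only while at least one connection to it is open, which is why `_create_local_instance` parks one connection per database for the lifetime of the env. A stdlib-only demonstration of that behavior:

```python
# Shared-cache in-memory databases vanish when their last connection closes.
import sqlite3

uri = "file:demo?mode=memory&cache=shared"

anchor = sqlite3.connect(uri, uri=True)  # keeps the database alive
conn1 = sqlite3.connect(uri, uri=True)
conn1.execute("CREATE TABLE t (x INTEGER)")
conn1.execute("INSERT INTO t VALUES (42)")
conn1.commit()
conn1.close()

conn2 = sqlite3.connect(uri, uri=True)   # same database, data survives
assert conn2.execute("SELECT x FROM t").fetchone() == (42,)
conn2.close()

anchor.close()                           # last connection gone...
conn3 = sqlite3.connect(uri, uri=True)   # ...so this is a fresh, empty db
assert conn3.execute(
    "SELECT name FROM sqlite_master WHERE type='table'"
).fetchall() == []
conn3.close()
```

This is presumably also what the new tests/test_sqlite_shared_memory_behavior.py in the file list exercises.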
```diff
@@ -404,8 +572,8 @@ class Fleet:
                 error_msg = f"Failed to create verifier {task_json.get('key', task_json.get('id'))}: {e}"
                 if raise_on_verifier_error:
                     raise ValueError(error_msg) from e
-
-
+                else:
+                    logger.warning(error_msg)

         task = Task(
             key=task_json.get("key", task_json.get("id")),
```
```diff
@@ -495,23 +663,23 @@ class Fleet:
                         verifier_sha=tr.verifier.sha256,
                     )
                 except Exception as e:
-
-
-
+                    logger.warning(
+                        f"Failed to create verifier {tr.verifier.key}: {e}"
+                    )
                     return None
             else:
                 # Fallback: try fetching by ID
                 try:
-
-
-
-
+                    logger.warning(
+                        f"Embedded verifier code missing for {tr.verifier.key} (NoSuchKey). "
+                        f"Attempting to refetch by id {tr.verifier.verifier_id}"
+                    )
                     return self._load_verifier(tr.verifier.verifier_id)
                 except Exception as e:
-
-
-
-
+                    logger.warning(
+                        f"Refetch by verifier id failed for {tr.verifier.key}: {e}. "
+                        "Leaving verifier unset."
+                    )
                     return None

         # Add the task for parallel execution
```
```diff
@@ -551,7 +719,7 @@ class Fleet:
                 result = future.result()
                 verifier_results.append(result)
             except Exception as e:
-
+                logger.warning(f"Verifier loading failed: {e}")
                 verifier_results.append(None)

         # Build tasks with results
```
```diff
@@ -638,10 +806,10 @@ class Fleet:
             with open(filename, "w", encoding="utf-8") as f:
                 json.dump(tasks_data, f, indent=2, default=str)

-
+            logger.info(f"Exported {len(tasks)} tasks to {filename}")
             return filename
         else:
-
+            logger.info("No tasks found to export")
             return None

     def import_single_task(self, task: Task, project_key: Optional[str] = None):
```
```diff
@@ -670,7 +838,7 @@ class Fleet:
             )
             return response
         except Exception as e:
-
+            logger.error(f"Failed to import task {task.key}: {e}")
             return None

     def import_tasks(self, filename: str, project_key: Optional[str] = None):
```
```diff
@@ -883,17 +1051,17 @@ def _execute_verifier_remote(
         request_data["bundle"] = bundle_b64

     # Debug logging
-
-
-
-
-
-
-
-
-
-
-
+    logger.debug(
+        f"Sending verifier execute request: key={key}, sha256={bundle_sha[:8]}..., function_name={function_name}"
+    )
+    logger.debug(f"Request has bundle: {needs_upload}")
+    logger.debug(f"Using client with base_url: {client.base_url}")
+    logger.debug(f"Request data keys: {list(request_data.keys())}")
+    logger.debug(
+        f"Bundle size: {len(request_data.get('bundle', ''))} chars"
+        if "bundle" in request_data
+        else "No bundle"
+    )

     # Note: This should be called on the instance URL, not the orchestrator
     # The instance has manager URLs for verifier execution
```
```diff
@@ -901,6 +1069,6 @@ def _execute_verifier_remote(

     # Debug the response
     response_json = response.json()
-
+    logger.debug(f"Verifier execute response: {response_json}")

     return VerifiersExecuteResponse(**response_json)
```
fleet/instance/client.py CHANGED
```diff
@@ -83,9 +83,17 @@ class InstanceClient:
         Returns:
             An SQLite database resource for the given database name
         """
-
-
-        )
+        resource_info = self._resources_state[ResourceType.db.value][name]
+        # Local mode - resource_info is a dict with creation parameters
+        if isinstance(resource_info, dict) and resource_info.get('type') == 'local':
+            # Create new instance each time (matching HTTP mode behavior)
+            return SQLiteResource(
+                resource_info['resource_model'],
+                client=None,
+                db_path=resource_info['db_path']
+            )
+        # HTTP mode - resource_info is a ResourceModel, create new wrapper
+        return SQLiteResource(resource_info, self.client)

     def browser(self, name: str) -> BrowserResource:
         return BrowserResource(
```
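With the dual-mode `db()` above, the same accessor works whether the resource map was populated from local creation parameters or fetched over HTTP, and each call hands back a fresh SQLiteResource. A usage sketch (client construction details are assumed, not confirmed by this diff):

```python
# Usage sketch for the three instance() modes feeding into db().
import fleet

client = fleet.Fleet()

# Remote cloud instance (existing string behavior; hypothetical id)
remote_env = client.instance("inst_abc123")

# Direct URL mode against a locally running instance manager
local_url_env = client.instance("http://localhost:8080/api/v1/env")

# Pure local mode: files and named in-memory databases, no server at all
local_env = client.instance({"current": "./data.db", "seed": ":memory:seed"})

current_db = local_env.instance.db("current")  # fresh SQLiteResource per call
```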
```diff
@@ -175,10 +183,17 @@ class InstanceClient:
         response = self.client.request("GET", "/health")
         return HealthResponse(**response.json())

+    def close(self):
+        """Close anchor connections for in-memory databases."""
+        if hasattr(self, '_memory_anchors'):
+            for conn in self._memory_anchors.values():
+                conn.close()
+            self._memory_anchors.clear()
+
     def __enter__(self):
-        """
+        """Context manager entry."""
         return self

     def __exit__(self, exc_type, exc_val, exc_tb):
-        """
+        """Context manager exit."""
         self.close()
```
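The new `close()` slots into the context-manager protocol that already existed, so the in-memory anchors can be scoped with `with`; the `hasattr` guard keeps it safe for clients that never created anchors. A sketch:

```python
# Sketch: scoping an InstanceClient so exiting the block releases its anchors.
with InstanceClient("local://", None) as instance:
    ...  # use instance.db(...) while the anchors hold the databases open
# __exit__ called close(): anchor connections are closed and any
# shared-cache in-memory databases they kept alive are released
```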