fleet-python 0.2.69__py3-none-any.whl → 0.2.69b3__py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.

Potentially problematic release: this version of fleet-python has been flagged as possibly problematic.

fleet/_async/base.py CHANGED
@@ -48,7 +48,7 @@ class BaseWrapper:
         import logging

         logger = logging.getLogger(__name__)
-        # logger.debug(f"Headers being sent: {headers}")
+        logger.debug(f"Headers being sent: {headers}")
         return headers


@@ -93,9 +93,8 @@ class AsyncWrapper(BaseWrapper):

        # Debug log 500 errors
        if status_code == 500:
-            # logger.error(f"Got 500 error from {response.url}")
-            # logger.error(f"Response text: {response.text}")
-            pass
+            logger.error(f"Got 500 error from {response.url}")
+            logger.error(f"Response text: {response.text}")

        # Try to parse error response as JSON
        try:
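
These hunks re-enable previously commented-out header and 500-error logging. A hedged sketch of how a consuming application would surface the new log lines; the "fleet" logger namespace is inferred from the logging.getLogger(__name__) call above, and the format string is illustrative:

import logging

# Keep third-party noise at INFO, but opt the fleet loggers in to DEBUG so the
# re-enabled "Headers being sent" and 500-error lines are emitted.
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(name)s %(levelname)s %(message)s")
logging.getLogger("fleet").setLevel(logging.DEBUG)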
fleet/_async/client.py CHANGED
@@ -21,7 +21,7 @@ import httpx
 import json
 import logging
 import os
-from typing import List, Optional, Dict, Any, TYPE_CHECKING
+from typing import List, Optional, Dict, Any, TYPE_CHECKING, Union

 from .base import EnvironmentBase, AsyncWrapper
 from ..models import (
@@ -47,6 +47,11 @@ from .instance import (
     ResetResponse,
     ExecuteFunctionResponse,
 )
+from ..instance.models import (
+    Resource as ResourceModel,
+    ResourceType,
+    ResourceMode,
+)
 from ..config import (
     DEFAULT_MAX_RETRIES,
     DEFAULT_TIMEOUT,
@@ -287,11 +292,163 @@ class AsyncFleet:
            for instance_data in response.json()
        ]

-    async def instance(self, instance_id: str) -> AsyncEnv:
-        response = await self.client.request("GET", f"/v1/env/instances/{instance_id}")
-        instance = AsyncEnv(client=self.client, **response.json())
-        await instance.instance.load()
-        return instance
+    async def instance(self, instance_id: Union[str, Dict[str, str]]) -> AsyncEnv:
+        """Create or connect to an environment instance.
+
+        Supports three modes based on input type:
+        1. dict: Local filesystem mode - {"current": "./data.db", "seed": "./seed.db"}
+        2. str starting with http:// or https://: Localhost/URL mode
+        3. str (other): Remote cloud instance mode
+
+        Args:
+            instance_id: Instance identifier (str), URL (str starting with http://),
+                or local db mapping (dict)
+
+        Returns:
+            AsyncEnv: Environment instance
+        """
+        # Local filesystem mode - dict of resource names to file paths
+        if isinstance(instance_id, dict):
+            return self._create_local_instance(instance_id)
+
+        # Localhost/direct URL mode - string starting with http:// or https://
+        elif isinstance(instance_id, str) and instance_id.startswith(("http://", "https://")):
+            return self._create_url_instance(instance_id)
+
+        # Remote mode - existing behavior
+        else:
+            response = await self.client.request("GET", f"/v1/env/instances/{instance_id}")
+            instance = AsyncEnv(client=self.client, **response.json())
+            await instance.instance.load()
+            return instance
+
+    def _create_url_instance(self, base_url: str) -> AsyncEnv:
+        """Create instance connected to a direct URL (localhost or custom).
+
+        Args:
+            base_url: URL of the instance manager API
+
+        Returns:
+            AsyncEnv: Environment instance configured for URL mode
+        """
+        instance_client = AsyncInstanceClient(url=base_url, httpx_client=self._httpx_client)
+
+        # Create a minimal environment for URL mode
+        env = AsyncEnv(
+            client=self.client,
+            instance_id=base_url,
+            env_key="localhost",
+            version="",
+            status="running",
+            subdomain="localhost",
+            created_at="",
+            updated_at="",
+            terminated_at=None,
+            team_id="",
+            region="localhost",
+            env_variables=None,
+            data_key=None,
+            data_version=None,
+            urls=None,
+            health=None,
+        )
+        env._instance = instance_client
+        return env
+
+    @staticmethod
+    def _normalize_db_path(path: str) -> tuple[str, bool]:
+        """Normalize database path and detect if it's in-memory.
+
+        Args:
+            path: Database path - can be:
+                - File path: "./data.db"
+                - Plain memory: ":memory:"
+                - Named memory: ":memory:namespace"
+                - URI: "file:name?mode=memory&cache=shared"
+
+        Returns:
+            Tuple of (normalized_path, is_memory)
+        """
+        import uuid
+        import sqlite3
+
+        if path == ":memory:":
+            # Plain :memory: - create unique namespace
+            name = f"mem_{uuid.uuid4().hex[:8]}"
+            return f"file:{name}?mode=memory&cache=shared", True
+        elif path.startswith(":memory:"):
+            # Named memory: :memory:current -> file:current?mode=memory&cache=shared
+            namespace = path[8:]  # Remove ":memory:" prefix
+            return f"file:{namespace}?mode=memory&cache=shared", True
+        elif "mode=memory" in path:
+            # Already a proper memory URI
+            return path, True
+        else:
+            # Regular file path
+            return path, False
+
+    def _create_local_instance(self, dbs: Dict[str, str]) -> AsyncEnv:
+        """Create instance with local file-based or in-memory SQLite resources.
+
+        Args:
+            dbs: Map of resource names to paths (e.g., {"current": "./data.db"} or
+                {"current": ":memory:current"})
+
+        Returns:
+            AsyncEnv: Environment instance configured for local mode
+        """
+        import sqlite3
+
+        instance_client = AsyncInstanceClient(url="local://", httpx_client=None)
+        instance_client._resources = []  # Mark as loaded
+        instance_client._memory_anchors = {}  # Store anchor connections for in-memory DBs
+
+        # Store creation parameters for local AsyncSQLiteResources
+        # This allows db() to create new instances each time (matching HTTP mode behavior)
+        for name, path in dbs.items():
+            # Normalize path and detect if it's in-memory
+            normalized_path, is_memory = self._normalize_db_path(path)
+
+            # Create anchor connection for in-memory databases
+            # This keeps the database alive as long as the env exists
+            if is_memory:
+                anchor_conn = sqlite3.connect(normalized_path, uri=True)
+                instance_client._memory_anchors[name] = anchor_conn
+
+            resource_model = ResourceModel(
+                name=name,
+                type=ResourceType.db,
+                mode=ResourceMode.rw,
+                label=f"Local: {path}",
+            )
+            instance_client._resources_state[ResourceType.db.value][name] = {
+                'type': 'local',
+                'resource_model': resource_model,
+                'db_path': normalized_path,
+                'is_memory': is_memory
+            }
+
+        # Create a minimal environment for local mode
+        env = AsyncEnv(
+            client=self.client,
+            instance_id="local",
+            env_key="local",
+            version="",
+            status="running",
+            subdomain="local",
+            created_at="",
+            updated_at="",
+            terminated_at=None,
+            team_id="",
+            region="local",
+            env_variables=None,
+            data_key=None,
+            data_version=None,
+            urls=None,
+            health=None,
+        )
+        env._instance = instance_client
+        return env

    async def check_bundle_exists(self, bundle_hash: str) -> VerifiersCheckResponse:
        return await _check_bundle_exists(self.client, bundle_hash)
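
A hedged usage sketch of the three dispatch modes described in the new docstring; the AsyncFleet import path, the constructor arguments, and the instance id are illustrative assumptions:

import asyncio
from fleet._async.client import AsyncFleet  # assumed import path

async def main():
    fleet_client = AsyncFleet()  # assumed: default constructor picks up credentials from the environment

    # 1. Local filesystem / in-memory mode (dict of resource names to paths)
    local_env = await fleet_client.instance({"current": "./data.db", "seed": ":memory:seed"})

    # 2. Localhost / direct URL mode (requires an instance manager listening on that URL)
    url_env = await fleet_client.instance("http://localhost:8080")

    # 3. Remote cloud mode (illustrative instance id; requires API access)
    remote_env = await fleet_client.instance("inst_abc123")

asyncio.run(main())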
@@ -406,8 +563,8 @@ class AsyncFleet:
                error_msg = f"Failed to create verifier {task_json.get('key', task_json.get('id'))}: {e}"
                if raise_on_verifier_error:
                    raise ValueError(error_msg) from e
-                # else:
-                #     logger.warning(error_msg)
+                else:
+                    logger.warning(error_msg)

            task = Task(
                key=task_json.get("key", task_json.get("id")),
@@ -499,25 +656,25 @@ class AsyncFleet:
                        verifier_sha=tr.verifier.sha256,
                    )
                except Exception as e:
-                    # logger.warning(
-                    #     f"Failed to create verifier {tr.verifier.key}: {e}"
-                    # )
+                    logger.warning(
+                        f"Failed to create verifier {tr.verifier.key}: {e}"
+                    )
                    return None
            else:
                # Fallback: try fetching by ID
                try:
-                    # logger.warning(
-                    #     f"Embedded verifier code missing for {tr.verifier.key} (NoSuchKey). "
-                    #     f"Attempting to refetch by id {tr.verifier.verifier_id}"
-                    # )
+                    logger.warning(
+                        f"Embedded verifier code missing for {tr.verifier.key} (NoSuchKey). "
+                        f"Attempting to refetch by id {tr.verifier.verifier_id}"
+                    )
                    return await self._load_verifier(
                        tr.verifier.verifier_id
                    )
                except Exception as e:
-                    # logger.warning(
-                    #     f"Refetch by verifier id failed for {tr.verifier.key}: {e}. "
-                    #     "Leaving verifier unset."
-                    # )
+                    logger.warning(
+                        f"Refetch by verifier id failed for {tr.verifier.key}: {e}. "
+                        "Leaving verifier unset."
+                    )
                    return None

            # Add the coroutine for parallel execution
@@ -556,10 +713,9 @@ class AsyncFleet:
            if task_response.verifier:
                # Process verifier result
                if isinstance(verifier_result, Exception):
-                    # logger.warning(
-                    #     f"Verifier loading failed for {task_response.key}: {verifier_result}"
-                    # )
-                    pass
+                    logger.warning(
+                        f"Verifier loading failed for {task_response.key}: {verifier_result}"
+                    )
                elif verifier_result is not None:
                    verifier = verifier_result
                    embedded_code = task_response.verifier.code or ""
@@ -633,10 +789,10 @@ class AsyncFleet:
            with open(filename, "w", encoding="utf-8") as f:
                json.dump(tasks_data, f, indent=2, default=str)

-            # logger.info(f"Exported {len(tasks)} tasks to {filename}")
+            logger.info(f"Exported {len(tasks)} tasks to {filename}")
            return filename
        else:
-            # logger.info("No tasks found to export")
+            logger.info("No tasks found to export")
            return None

    async def import_single_task(self, task: Task, project_key: Optional[str] = None):
@@ -665,7 +821,7 @@ class AsyncFleet:
            )
            return response
        except Exception as e:
-            # logger.error(f"Failed to import task {task.key}: {e}")
+            logger.error(f"Failed to import task {task.key}: {e}")
            return None

    async def import_tasks(self, filename: str, project_key: Optional[str] = None):
@@ -885,17 +1041,17 @@ async def _execute_verifier_remote(
        request_data["bundle"] = bundle_b64

    # Debug logging
-    # logger.debug(
-    #     f"Sending verifier execute request: key={key}, sha256={bundle_sha[:8]}..., function_name={function_name}"
-    # )
-    # logger.debug(f"Request has bundle: {needs_upload}")
-    # logger.debug(f"Using client with base_url: {client.base_url}")
-    # logger.debug(f"Request data keys: {list(request_data.keys())}")
-    # logger.debug(
-    #     f"Bundle size: {len(request_data.get('bundle', ''))} chars"
-    #     if "bundle" in request_data
-    #     else "No bundle"
-    # )
+    logger.debug(
+        f"Sending verifier execute request: key={key}, sha256={bundle_sha[:8]}..., function_name={function_name}"
+    )
+    logger.debug(f"Request has bundle: {needs_upload}")
+    logger.debug(f"Using client with base_url: {client.base_url}")
+    logger.debug(f"Request data keys: {list(request_data.keys())}")
+    logger.debug(
+        f"Bundle size: {len(request_data.get('bundle', ''))} chars"
+        if "bundle" in request_data
+        else "No bundle"
+    )

    # Note: This should be called on the instance URL, not the orchestrator
    # The instance has manager URLs for verifier execution
@@ -903,6 +1059,6 @@ async def _execute_verifier_remote(

    # Debug the response
    response_json = response.json()
-    # logger.debug(f"Verifier execute response: {response_json}")
+    logger.debug(f"Verifier execute response: {response_json}")

    return VerifiersExecuteResponse(**response_json)
@@ -85,9 +85,17 @@ class AsyncInstanceClient:
        Returns:
            An SQLite database resource for the given database name
        """
-        return AsyncSQLiteResource(
-            self._resources_state[ResourceType.db.value][name], self.client
-        )
+        resource_info = self._resources_state[ResourceType.db.value][name]
+        # Local mode - resource_info is a dict with creation parameters
+        if isinstance(resource_info, dict) and resource_info.get('type') == 'local':
+            # Create new instance each time (matching HTTP mode behavior)
+            return AsyncSQLiteResource(
+                resource_info['resource_model'],
+                client=None,
+                db_path=resource_info['db_path']
+            )
+        # HTTP mode - resource_info is a ResourceModel, create new wrapper
+        return AsyncSQLiteResource(resource_info, self.client)

    def browser(self, name: str) -> AsyncBrowserResource:
        return AsyncBrowserResource(
@@ -177,10 +185,17 @@ class AsyncInstanceClient:
        response = await self.client.request("GET", "/health")
        return HealthResponse(**response.json())

+    def close(self):
+        """Close anchor connections for in-memory databases."""
+        if hasattr(self, '_memory_anchors'):
+            for conn in self._memory_anchors.values():
+                conn.close()
+            self._memory_anchors.clear()
+
    async def __aenter__(self):
        """Async context manager entry."""
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Async context manager exit."""
-        await self.close()
+        self.close()
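
The close() method above exists because of the anchor connections created in _create_local_instance: a shared-cache in-memory SQLite database survives only while at least one connection to its URI is open. A standalone sqlite3 sketch of that behavior (names are illustrative):

import sqlite3

uri = "file:demo?mode=memory&cache=shared"

anchor = sqlite3.connect(uri, uri=True)   # keeps the in-memory database alive
writer = sqlite3.connect(uri, uri=True)
writer.execute("CREATE TABLE t (x INTEGER)")
writer.execute("INSERT INTO t VALUES (1)")
writer.commit()
writer.close()

reader = sqlite3.connect(uri, uri=True)   # same shared in-memory database
print(reader.execute("SELECT x FROM t").fetchall())  # [(1,)]
reader.close()
anchor.close()                            # last connection gone: database is discarded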
@@ -6,6 +6,7 @@ from datetime import datetime
 import tempfile
 import sqlite3
 import os
+import asyncio

 from typing import TYPE_CHECKING

@@ -679,17 +680,100 @@ class AsyncQueryBuilder:


 class AsyncSQLiteResource(Resource):
-    def __init__(self, resource: ResourceModel, client: "AsyncWrapper"):
+    def __init__(
+        self,
+        resource: ResourceModel,
+        client: Optional["AsyncWrapper"] = None,
+        db_path: Optional[str] = None,
+    ):
         super().__init__(resource)
         self.client = client
+        self.db_path = db_path
+        self._mode = "direct" if db_path else "http"
+
+    @property
+    def mode(self) -> str:
+        """Return the mode of this resource: 'direct' (local file) or 'http' (remote API)."""
+        return self._mode

     async def describe(self) -> DescribeResponse:
         """Describe the SQLite database schema."""
+        if self._mode == "direct":
+            return await self._describe_direct()
+        else:
+            return await self._describe_http()
+
+    async def _describe_http(self) -> DescribeResponse:
+        """Describe database schema via HTTP API."""
         response = await self.client.request(
             "GET", f"/resources/sqlite/{self.resource.name}/describe"
         )
         return DescribeResponse(**response.json())

+    async def _describe_direct(self) -> DescribeResponse:
+        """Describe database schema from local file or in-memory database."""
+        def _sync_describe():
+            try:
+                # Check if we need URI mode (for shared memory databases)
+                use_uri = 'mode=memory' in self.db_path
+                conn = sqlite3.connect(self.db_path, uri=use_uri)
+                cursor = conn.cursor()
+
+                # Get all tables
+                cursor.execute(
+                    "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"
+                )
+                table_names = [row[0] for row in cursor.fetchall()]
+
+                tables = []
+                for table_name in table_names:
+                    # Get table info
+                    cursor.execute(f"PRAGMA table_info({table_name})")
+                    columns = cursor.fetchall()
+
+                    # Get CREATE TABLE SQL
+                    cursor.execute(
+                        f"SELECT sql FROM sqlite_master WHERE type='table' AND name=?",
+                        (table_name,)
+                    )
+                    sql_row = cursor.fetchone()
+                    create_sql = sql_row[0] if sql_row else ""
+
+                    table_schema = {
+                        "name": table_name,
+                        "sql": create_sql,
+                        "columns": [
+                            {
+                                "name": col[1],
+                                "type": col[2],
+                                "notnull": bool(col[3]),
+                                "default_value": col[4],
+                                "primary_key": col[5] > 0,
+                            }
+                            for col in columns
+                        ],
+                    }
+                    tables.append(table_schema)
+
+                conn.close()
+
+                return DescribeResponse(
+                    success=True,
+                    resource_name=self.resource.name,
+                    tables=tables,
+                    message="Schema retrieved from local file",
+                )
+            except Exception as e:
+                return DescribeResponse(
+                    success=False,
+                    resource_name=self.resource.name,
+                    tables=None,
+                    error=str(e),
+                    message=f"Failed to describe database: {str(e)}",
+                )
+
+        return await asyncio.to_thread(_sync_describe)
+
     async def query(
         self, query: str, args: Optional[List[Any]] = None
     ) -> QueryResponse:
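
For reference when reading the col[...] indexing in _describe_direct: PRAGMA table_info returns one row per column in the order (cid, name, type, notnull, dflt_value, pk), which the code above maps into its column dictionaries. A quick standard-library check (table name is illustrative):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT NOT NULL)")
for cid, name, col_type, notnull, default, pk in conn.execute("PRAGMA table_info(users)"):
    # mirrors the mapping above: name=col[1], type=col[2], notnull=col[3], primary_key=col[5] > 0
    print(cid, name, col_type, bool(notnull), default, pk > 0)
conn.close()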
@@ -701,6 +785,15 @@ class AsyncSQLiteResource(Resource):
    async def _query(
        self, query: str, args: Optional[List[Any]] = None, read_only: bool = True
    ) -> QueryResponse:
+        if self._mode == "direct":
+            return await self._query_direct(query, args, read_only)
+        else:
+            return await self._query_http(query, args, read_only)
+
+    async def _query_http(
+        self, query: str, args: Optional[List[Any]] = None, read_only: bool = True
+    ) -> QueryResponse:
+        """Execute query via HTTP API."""
        request = QueryRequest(query=query, args=args, read_only=read_only)
        response = await self.client.request(
            "POST",
@@ -709,6 +802,62 @@ class AsyncSQLiteResource(Resource):
        )
        return QueryResponse(**response.json())

+    async def _query_direct(
+        self, query: str, args: Optional[List[Any]] = None, read_only: bool = True
+    ) -> QueryResponse:
+        """Execute query directly on local SQLite file or in-memory database."""
+        def _sync_query():
+            try:
+                # Check if we need URI mode (for shared memory databases)
+                use_uri = 'mode=memory' in self.db_path
+                conn = sqlite3.connect(self.db_path, uri=use_uri)
+                cursor = conn.cursor()
+
+                # Execute the query
+                if args:
+                    cursor.execute(query, args)
+                else:
+                    cursor.execute(query)
+
+                # For write operations, commit the transaction
+                if not read_only:
+                    conn.commit()
+
+                # Get column names if available
+                columns = [desc[0] for desc in cursor.description] if cursor.description else []
+
+                # Fetch results for SELECT queries
+                rows = []
+                rows_affected = 0
+                last_insert_id = None
+
+                if cursor.description:  # SELECT query
+                    rows = cursor.fetchall()
+                else:  # INSERT/UPDATE/DELETE
+                    rows_affected = cursor.rowcount
+                    last_insert_id = cursor.lastrowid if cursor.lastrowid else None
+
+                conn.close()
+
+                return QueryResponse(
+                    success=True,
+                    columns=columns if columns else None,
+                    rows=rows if rows else None,
+                    rows_affected=rows_affected if rows_affected > 0 else None,
+                    last_insert_id=last_insert_id,
+                    message="Query executed successfully",
+                )
+            except Exception as e:
+                return QueryResponse(
+                    success=False,
+                    columns=None,
+                    rows=None,
+                    error=str(e),
+                    message=f"Query failed: {str(e)}",
+                )
+
+        return await asyncio.to_thread(_sync_query)
+
    def table(self, table_name: str) -> AsyncQueryBuilder:
        """Create a query builder for the specified table."""
        return AsyncQueryBuilder(self, table_name)
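
A hedged sketch of exercising the new direct mode end to end; AsyncSQLiteResource, describe(), query(), and the ResourceModel keyword fields come from this diff, while the import locations and the file path are assumptions:

import asyncio
from fleet.instance.models import Resource as ResourceModel, ResourceType, ResourceMode
from fleet._async.instance.resources import AsyncSQLiteResource  # assumed module path

async def main():
    db = AsyncSQLiteResource(
        ResourceModel(name="current", type=ResourceType.db, mode=ResourceMode.rw, label="Local: ./data.db"),
        client=None,
        db_path="./data.db",  # sqlite3 creates the file on first connect
    )
    assert db.mode == "direct"
    schema = await db.describe()                # blocking sqlite3 work runs via asyncio.to_thread
    result = await db.query("SELECT 1 AS one")  # read-only query against the local file
    print(schema.success, result.rows)

asyncio.run(main())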
fleet/_async/tasks.py CHANGED
@@ -295,8 +295,11 @@ def verifier_from_string(
    # Remove lines like: @verifier(key="...")
    cleaned_code = re.sub(r"@verifier\([^)]*\)\s*\n", "", verifier_func)
    # Also remove the verifier import if present
-    cleaned_code = re.sub(r"from fleet import.*verifier.*\n", "", cleaned_code)
-    cleaned_code = re.sub(r"import.*verifier.*\n", "", cleaned_code)
+    # Use MULTILINE flag to match beginning of lines with ^
+    cleaned_code = re.sub(r"^from fleet\.verifiers.*import.*verifier.*$\n?", "", cleaned_code, flags=re.MULTILINE)
+    cleaned_code = re.sub(r"^from fleet import verifier.*$\n?", "", cleaned_code, flags=re.MULTILINE)
+    cleaned_code = re.sub(r"^import fleet\.verifiers.*$\n?", "", cleaned_code, flags=re.MULTILINE)
+    cleaned_code = re.sub(r"^import fleet$\n?", "", cleaned_code, flags=re.MULTILINE)

    # Create a local namespace for executing the code
    local_namespace = {
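
A small check of what the new MULTILINE-anchored patterns do to a typical verifier string; the import lines in the sample text are illustrative, the regexes are the ones added above:

import re

src = (
    "import fleet\n"
    "from fleet.verifiers.db import verifier\n"  # illustrative import path inside the sample
    "\n"
    "@verifier(key='demo')\n"
    "def check(env):\n"
    "    return 1\n"
)
src = re.sub(r"@verifier\([^)]*\)\s*\n", "", src)
src = re.sub(r"^from fleet\.verifiers.*import.*verifier.*$\n?", "", src, flags=re.MULTILINE)
src = re.sub(r"^import fleet$\n?", "", src, flags=re.MULTILINE)
# With ^...$ anchored per line, only whole import lines are removed; the decorator
# regex strips @verifier(...), leaving the bare function (plus a leftover blank line).
print(src)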