futurehouse-client 0.4.2.dev274__py3-none-any.whl → 0.4.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- futurehouse_client/clients/data_storage_methods.py +844 -127
- futurehouse_client/clients/rest_client.py +110 -41
- futurehouse_client/models/client.py +5 -1
- futurehouse_client/models/data_storage_methods.py +24 -10
- futurehouse_client/models/rest.py +39 -7
- futurehouse_client/utils/general.py +35 -6
- futurehouse_client/utils/world_model_tools.py +21 -2
- futurehouse_client/version.py +2 -2
- {futurehouse_client-0.4.2.dev274.dist-info → futurehouse_client-0.4.3.dist-info}/METADATA +3 -1
- futurehouse_client-0.4.3.dist-info/RECORD +23 -0
- futurehouse_client-0.4.2.dev274.dist-info/RECORD +0 -23
- {futurehouse_client-0.4.2.dev274.dist-info → futurehouse_client-0.4.3.dist-info}/WHEEL +0 -0
- {futurehouse_client-0.4.2.dev274.dist-info → futurehouse_client-0.4.3.dist-info}/licenses/LICENSE +0 -0
- {futurehouse_client-0.4.2.dev274.dist-info → futurehouse_client-0.4.3.dist-info}/top_level.txt +0 -0
futurehouse_client/clients/rest_client.py
CHANGED
@@ -1,3 +1,4 @@
+# ruff: noqa: PLR0915
 import ast
 import asyncio
 import base64
@@ -54,12 +55,14 @@ from futurehouse_client.models.app import (
 from futurehouse_client.models.rest import (
     DiscoveryResponse,
     ExecutionStatus,
+    SearchCriterion,
     UserAgentRequest,
     UserAgentRequestPostPayload,
     UserAgentRequestStatus,
     UserAgentResponsePayload,
     WorldModel,
     WorldModelResponse,
+    WorldModelSearchPayload,
 )
 from futurehouse_client.utils.auth import RefreshingJWT
 from futurehouse_client.utils.general import (
@@ -162,7 +165,6 @@ retry_if_connection_error = create_retry_if_connection_error(FileUploadError)
 DEFAULT_AGENT_TIMEOUT: int = 2400  # seconds
 
 
-# pylint: disable=too-many-public-methods
 class RestClient(DataStorageMethods):
     REQUEST_TIMEOUT: ClassVar[float] = 30.0  # sec - for general API calls
     FILE_UPLOAD_TIMEOUT: ClassVar[float] = 600.0  # 10 minutes - for file uploads
@@ -332,16 +334,23 @@ class RestClient(DataStorageMethods):
             raise ValueError(f"Organization '{organization}' not found.")
         return filtered_orgs
 
+    @retry(
+        stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
+        wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
+        retry=retry_if_connection_error,
+        before_sleep=before_sleep_log(logger, logging.WARNING),
+    )
     def _check_job(self, name: str, organization: str) -> dict[str, Any]:
-
-
-
-            )
-            response.raise_for_status()
-            return response.json()
-        except Exception as e:
-            raise JobFetchError(f"Error checking job: {e!r}.") from e
+        response = self.client.get(f"/v0.1/crows/{name}/organizations/{organization}")
+        response.raise_for_status()
+        return response.json()
 
+    @retry(
+        stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
+        wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
+        retry=retry_if_connection_error,
+        before_sleep=before_sleep_log(logger, logging.WARNING),
+    )
     def _fetch_my_orgs(self) -> list[str]:
         response = self.client.get(f"/v0.1/organizations?filter={True}")
         response.raise_for_status()
@@ -699,10 +708,12 @@
 
     async def arun_tasks_until_done(
         self,
-        task_data:
-
-
-
+        task_data: (
+            TaskRequest
+            | dict[str, Any]
+            | Collection[TaskRequest]
+            | Collection[dict[str, Any]]
+        ),
         verbose: bool = False,
         progress_bar: bool = False,
         concurrency: int = 10,
@@ -770,10 +781,12 @@
 
     def run_tasks_until_done(
         self,
-        task_data:
-
-
-
+        task_data: (
+            TaskRequest
+            | dict[str, Any]
+            | Collection[TaskRequest]
+            | Collection[dict[str, Any]]
+        ),
         verbose: bool = False,
         progress_bar: bool = False,
         timeout: int = DEFAULT_AGENT_TIMEOUT,
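Both run_tasks_until_done and arun_tasks_until_done now annotate task_data as a single TaskRequest/dict or a collection of either. A minimal sketch of a batched call, assuming an already-configured client; the dict keys ("name", "query") are illustrative placeholders, not the real TaskRequest schema:

from typing import Any

from futurehouse_client.clients.rest_client import RestClient


def run_batch(client: RestClient, queries: list[str]) -> list[Any]:
    """Submit several tasks at once and block until all finish (hypothetical helper)."""
    task_data = [
        # "name" and "query" are placeholder keys; consult TaskRequest for the real fields.
        {"name": "example-job", "query": q}
        for q in queries
    ]
    return client.run_tasks_until_done(task_data=task_data, progress_bar=True)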
@@ -846,12 +859,9 @@
     )
     def get_build_status(self, build_id: UUID | None = None) -> dict[str, Any]:
         """Get the status of a build."""
-
-
-
-            response.raise_for_status()
-        except Exception as e:
-            raise JobFetchError(f"Error getting build status: {e!r}.") from e
+        build_id = build_id or self.build_id
+        response = self.client.get(f"/v0.1/builds/{build_id}")
+        response.raise_for_status()
         return response.json()
 
     # TODO: Refactor later so we don't have to ignore PLR0915
@@ -861,7 +871,7 @@
         retry=retry_if_connection_error,
         before_sleep=before_sleep_log(logger, logging.WARNING),
     )
-    def create_job(self, config: JobDeploymentConfig) -> dict[str, Any]:
+    def create_job(self, config: JobDeploymentConfig) -> dict[str, Any]:
         """Creates a futurehouse job deployment from the environment and environment files.
 
         Args:
@@ -1606,6 +1616,56 @@
         except Exception as e:
             raise WorldModelFetchError(f"An unexpected error occurred: {e!r}.") from e
 
+    @retry(
+        stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
+        wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
+        retry=retry_if_connection_error,
+    )
+    def list_world_models(
+        self,
+        name: str | None = None,
+        project_id: UUID | str | None = None,
+        limit: int = 150,
+        offset: int = 0,
+        sort_order: str = "asc",
+    ) -> list[WorldModelResponse]:
+        """List world models with different behavior based on filters.
+
+        When filtering by name: returns only the latest version for that name.
+        When filtering by project_id (without name): returns all versions for that project.
+        When no filters: returns latest version of each world model.
+
+        Args:
+            name: Filter by world model name.
+            project_id: Filter by project ID.
+            limit: The maximum number of models to return.
+            offset: Number of results to skip for pagination.
+            sort_order: Sort order 'asc' or 'desc'.
+
+        Returns:
+            A list of world model dictionaries.
+        """
+        try:
+            params: dict[str, str | int] = {
+                "limit": limit,
+                "offset": offset,
+                "sort_order": sort_order,
+            }
+            if name:
+                params["name"] = name
+            if project_id:
+                params["project_id"] = str(project_id)
+
+            response = self.client.get("/v0.1/world-models", params=params)
+            response.raise_for_status()
+            return response.json()
+        except HTTPStatusError as e:
+            raise WorldModelFetchError(
+                f"Error listing world models: {e.response.status_code} - {e.response.text}"
+            ) from e
+        except Exception as e:
+            raise WorldModelFetchError(f"An unexpected error occurred: {e!r}.") from e
+
     @retry(
         stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
         wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
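A minimal usage sketch for the new list_world_models method, assuming an already-authenticated RestClient; the filter values are placeholders, and entries arrive as plain JSON dicts at runtime despite the list[WorldModelResponse] annotation:

from futurehouse_client.clients.rest_client import RestClient


def print_project_models(client: RestClient, project_id: str) -> None:
    """List every stored version of the world models in one project (hypothetical helper)."""
    # Filtering by project_id without a name returns all versions for that project.
    for model in client.list_world_models(project_id=project_id, limit=50, sort_order="desc"):
        print(model["id"], model["name"])  # runtime entries are JSON dicts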
@@ -1613,34 +1673,43 @@
     )
     def search_world_models(
         self,
-
+        criteria: list[SearchCriterion] | None = None,
         size: int = 10,
-
+        project_id: UUID | str | None = None,
         search_all_versions: bool = False,
-    ) -> list[
-        """Search
+    ) -> list[WorldModelResponse]:
+        """Search world models using structured criteria.
 
         Args:
-
+            criteria: List of SearchCriterion objects with field, operator, and value.
             size: The number of results to return.
-
-            search_all_versions: Whether to search all versions
+            project_id: Optional filter by project ID.
+            search_all_versions: Whether to search all versions or just latest.
 
         Returns:
-            A list of world model
+            A list of world model responses.
+
+        Example:
+            from futurehouse_client.models.rest import SearchCriterion, SearchOperator
+            criteria = [
+                SearchCriterion(field="name", operator=SearchOperator.CONTAINS, value="chemistry"),
+                SearchCriterion(field="email", operator=SearchOperator.CONTAINS, value="tyler"),
+            ]
+            results = client.search_world_models(criteria=criteria, size=20)
         """
         try:
-
-
-
-
-
-
-
-
+            payload = WorldModelSearchPayload(
+                criteria=criteria or [],
+                size=size,
+                project_id=project_id,
+                search_all_versions=search_all_versions,
+            )
+
+            response = self.client.post(
+                "/v0.1/world-models/search",
+                json=payload.model_dump(mode="json"),
             )
             response.raise_for_status()
-            # The new endpoint returns a list of models directly
             return response.json()
         except HTTPStatusError as e:
             raise WorldModelFetchError(
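The structured search replaces the previous free-text signature (elided in the removed lines above). A minimal sketch assuming the field names used in the docstring example; how multiple criteria combine (AND vs OR) is decided server-side and is not visible in this diff:

from futurehouse_client.clients.rest_client import RestClient
from futurehouse_client.models.rest import SearchCriterion, SearchOperator


def find_models(client: RestClient, term: str) -> list:
    """Search the latest world-model versions against a single term (hypothetical helper)."""
    criteria = [
        SearchCriterion(field="name", operator=SearchOperator.CONTAINS, value=term),
        SearchCriterion(field="content", operator=SearchOperator.CONTAINS, value=term),
    ]
    return client.search_world_models(criteria=criteria, size=20)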
futurehouse_client/models/client.py
CHANGED
@@ -27,13 +27,17 @@ class InitialState(BaseState):
 
 class ASVState(BaseState, Generic[T]):
     action: OpResult[T] = Field()
-
+    next_state: Any = Field()
     value: float = Field()
 
     @field_serializer("action")
     def serialize_action(self, action: OpResult[T]) -> dict:
         return action.to_dict()
 
+    @field_serializer("next_state")
+    def serialize_next_state(self, state: Any) -> str:
+        return str(state)
+
 
 class EnvResetState(BaseState):
     observations: list[Message] = Field()
futurehouse_client/models/data_storage_methods.py
CHANGED
@@ -59,6 +59,7 @@ class DataStorageEntry(BaseModel):
 class DataStorageType(StrEnum):
     BIGQUERY = auto()
     GCS = auto()
+    LINK = auto()
     PG_TABLE = auto()
     RAW_CONTENT = auto()
     ELASTIC_SEARCH = auto()
@@ -83,8 +84,8 @@ class DataStorageLocationPayload(BaseModel):
     location: str | None = None
 
 
-class DataStorageLocationDetails(BaseModel):
-    """Model representing the location
+class DataStorageLocationConfig(BaseModel):
+    """Model representing the location configuration within a DataStorageLocations object."""
 
     storage_type: str = Field(description="Type of storage (e.g., 'gcs', 'pg_table')")
     content_type: str = Field(description="Type of content stored")
@@ -93,15 +94,19 @@ class DataStorageLocationDetails(BaseModel):
     location: str | None = Field(
         default=None, description="Location path or identifier"
     )
+    signed_url: str | None = Field(
+        default=None,
+        description="Signed URL for uploading/downloading the file to/from GCS",
+    )
 
 
-class
+class DataStorageLocation(BaseModel):
     """Model representing storage locations for a data storage entry."""
 
     id: UUID = Field(description="Unique identifier for the storage locations")
     data_storage_id: UUID = Field(description="ID of the associated data storage entry")
-    storage_config:
-        description="Storage configuration
+    storage_config: DataStorageLocationConfig = Field(
+        description="Storage location configuration"
     )
     created_at: datetime = Field(description="Timestamp when the location was created")
 
@@ -110,13 +115,9 @@ class DataStorageResponse(BaseModel):
     """Response model for data storage operations."""
 
     data_storage: DataStorageEntry = Field(description="The created data storage entry")
-
+    storage_locations: list[DataStorageLocation] = Field(
         description="Storage location for this data entry"
     )
-    signed_url: str | None = Field(
-        default=None,
-        description="Signed URL for uploading/downloading the file to/from GCS",
-    )
 
 
 class DataStorageRequestPayload(BaseModel):
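Note the breaking change in DataStorageResponse: signed_url is no longer a top-level field but lives on each location's storage_config. A minimal sketch of reading it under the new layout, using only the models shown above (the helper itself is hypothetical):

from futurehouse_client.models.data_storage_methods import DataStorageResponse


def first_signed_url(response: DataStorageResponse) -> str | None:
    """Return the first signed URL available on a storage response, if any."""
    for location in response.storage_locations:
        # signed_url moved from the response top level into storage_config.
        if location.storage_config.signed_url:
            return location.storage_config.signed_url
    return None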
@@ -152,6 +153,19 @@ class DataStorageRequestPayload(BaseModel):
     )
 
 
+class CreateDatasetPayload(BaseModel):
+    """Payload for creating a dataset."""
+
+    id: UUID | None = Field(
+        default=None,
+        description="ID of the dataset to create, or None to create a new dataset",
+    )
+    name: str = Field(description="Name of the dataset")
+    description: str | None = Field(
+        default=None, description="Description of the dataset"
+    )
+
+
 class ManifestEntry(BaseModel):
     """Model representing a single entry in a manifest file."""
 
futurehouse_client/models/rest.py
CHANGED
@@ -63,7 +63,37 @@ class WorldModel(BaseModel):
     project_id: UUID | str | None = None
 
 
-class WorldModelResponse(BaseModel):
+class SearchOperator(StrEnum):
+    """Operators for structured search criteria."""
+
+    EQUALS = "equals"
+    CONTAINS = "contains"
+    STARTS_WITH = "starts_with"
+    ENDS_WITH = "ends_with"
+    GREATER_THAN = "greater_than"
+    LESS_THAN = "less_than"
+    BETWEEN = "between"
+    IN = "in"
+
+
+class SearchCriterion(BaseModel):
+    """A single search criterion with field, operator, and value."""
+
+    field: str
+    operator: SearchOperator
+    value: str | list[str] | bool
+
+
+class WorldModelSearchPayload(BaseModel):
+    """Payload for structured world model search."""
+
+    criteria: list[SearchCriterion]
+    size: int = 10
+    project_id: UUID | str | None = None
+    search_all_versions: bool = False
+
+
+class WorldModelResponse(WorldModel):
     """
     Response model for a world model snapshot.
 
@@ -71,13 +101,8 @@ class WorldModelResponse(BaseModel):
     """
 
     id: UUID | str
-
-    name: str
-    description: str | None
-    content: str
-    trajectory_id: UUID | str | None
+    name: str  # type: ignore[mutable-override]  # The API always returns a non-optional name, overriding the base model's optional field.
     email: str | None
-    model_metadata: JsonValue | None
     enabled: bool
     created_at: datetime
 
@@ -141,3 +166,10 @@ class DiscoveryResponse(BaseModel):
     associated_trajectories: list[UUID | str]
     validation_level: int
     created_at: datetime
+
+
+class DataStorageSearchPayload(BaseModel):
+    """Payload for structured data storage search."""
+
+    criteria: list[SearchCriterion]
+    size: int = 10
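SearchCriterion.value accepts a string, a list of strings, or a boolean, which pairs naturally with operators such as IN, BETWEEN, and EQUALS. A minimal sketch of building the new payloads; which fields the server actually indexes is not shown in this diff:

from futurehouse_client.models.rest import (
    DataStorageSearchPayload,
    SearchCriterion,
    SearchOperator,
    WorldModelSearchPayload,
)

# List-valued criteria suit IN/BETWEEN; boolean values suit flags such as "enabled".
name_in = SearchCriterion(field="name", operator=SearchOperator.IN, value=["chemistry", "biology"])
enabled_only = SearchCriterion(field="enabled", operator=SearchOperator.EQUALS, value=True)

world_model_search = WorldModelSearchPayload(criteria=[name_in, enabled_only], size=5)
data_storage_search = DataStorageSearchPayload(criteria=[name_in], size=5)
print(world_model_search.model_dump(mode="json"))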
futurehouse_client/utils/general.py
CHANGED
@@ -1,22 +1,31 @@
 import asyncio
-from collections.abc import Awaitable, Iterable
+from collections.abc import Awaitable, Callable, Iterable
 from typing import TypeVar
 
 from httpx import (
     CloseError,
     ConnectError,
     ConnectTimeout,
+    HTTPStatusError,
     NetworkError,
     ReadError,
     ReadTimeout,
     RemoteProtocolError,
+    codes,
 )
 from requests.exceptions import RequestException, Timeout
-from tenacity import retry_if_exception_type
+from tenacity import RetryCallState
 from tqdm.asyncio import tqdm
 
 T = TypeVar("T")
 
+RETRYABLE_HTTP_STATUS_CODES = {
+    codes.TOO_MANY_REQUESTS,
+    codes.INTERNAL_SERVER_ERROR,
+    codes.BAD_GATEWAY,
+    codes.SERVICE_UNAVAILABLE,
+    codes.GATEWAY_TIMEOUT,
+}
 
 _BASE_CONNECTION_ERRORS = (
     # From requests
@@ -33,12 +42,32 @@ _BASE_CONNECTION_ERRORS = (
     CloseError,
 )
 
-retry_if_connection_error = retry_if_exception_type(_BASE_CONNECTION_ERRORS)
 
+def create_retry_if_connection_error(
+    *additional_exceptions,
+) -> Callable[[RetryCallState], bool]:
+    """Create a retry condition with base connection errors, HTTP status errors, plus additional exceptions."""
 
-def
-
-
+    def status_retries_with_exceptions(retry_state: RetryCallState) -> bool:
+        if retry_state.outcome is not None and hasattr(
+            retry_state.outcome, "exception"
+        ):
+            exception = retry_state.outcome.exception()
+            # connection errors
+            if isinstance(exception, _BASE_CONNECTION_ERRORS):
+                return True
+            # custom exceptions provided
+            if additional_exceptions and isinstance(exception, additional_exceptions):
+                return True
+            # any http exceptions
+            if isinstance(exception, HTTPStatusError):
+                return exception.response.status_code in RETRYABLE_HTTP_STATUS_CODES
+        return False
+
+    return status_retries_with_exceptions
+
+
+retry_if_connection_error = create_retry_if_connection_error()
 
 
 async def gather_with_concurrency(
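The module-level retry_if_connection_error predicate is now built by a factory that also retries HTTP 429 and common 5xx responses and can fold in caller-supplied exception types (rest_client.py passes FileUploadError, as the context line above shows). A minimal sketch of wiring the factory into a tenacity retry; UploadFlakyError is an invented placeholder exception:

from tenacity import retry, stop_after_attempt, wait_exponential

from futurehouse_client.utils.general import create_retry_if_connection_error


class UploadFlakyError(Exception):
    """Placeholder exception type to retry on (not part of the package)."""


# Retries base connection errors, retryable HTTP statuses (429/5xx), and UploadFlakyError.
retry_if_transient = create_retry_if_connection_error(UploadFlakyError)


@retry(stop=stop_after_attempt(3), wait=wait_exponential(multiplier=1, max=10), retry=retry_if_transient)
def flaky_upload() -> None:
    ...  # replace with a real network call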
futurehouse_client/utils/world_model_tools.py
CHANGED
@@ -5,7 +5,7 @@ from aviary.core import Tool
 
 from futurehouse_client.clients.rest_client import RestClient
 from futurehouse_client.models.app import Stage
-from futurehouse_client.models.rest import WorldModel
+from futurehouse_client.models.rest import SearchCriterion, SearchOperator, WorldModel
 
 
 class WorldModelTools:
@@ -56,7 +56,26 @@ class WorldModelTools:
         Returns:
             list[str]: A list of world model IDs that match the search query.
         """
-
+        criteria = (
+            [
+                SearchCriterion(
+                    field="name", operator=SearchOperator.CONTAINS, value=query
+                ),
+                SearchCriterion(
+                    field="description", operator=SearchOperator.CONTAINS, value=query
+                ),
+                SearchCriterion(
+                    field="content", operator=SearchOperator.CONTAINS, value=query
+                ),
+            ]
+            if query
+            else []
+        )
+
+        results = WorldModelTools._get_client().search_world_models(
+            criteria=criteria, size=size
+        )
+        return [str(model.id) for model in results]
 
 
 create_world_model_tool = Tool.from_function(WorldModelTools.create_world_model)
futurehouse_client/version.py
CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '0.4.2.dev274'
-__version_tuple__ = version_tuple = (0, 4, 2, 'dev274')
+__version__ = version = '0.4.3'
+__version_tuple__ = version_tuple = (0, 4, 3)
 
 __commit_id__ = commit_id = None
{futurehouse_client-0.4.2.dev274.dist-info → futurehouse_client-0.4.3.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: futurehouse-client
-Version: 0.4.2.dev274
+Version: 0.4.3
 Summary: A client for interacting with endpoints of the FutureHouse service.
 Author-email: FutureHouse technical staff <hello@futurehouse.org>
 License: Apache License
@@ -220,6 +220,7 @@ Requires-Dist: google-resumable-media[aiohttp]
 Requires-Dist: httpx
 Requires-Dist: ldp>=0.22.0
 Requires-Dist: litellm
+Requires-Dist: openai<1.100.0,>=1
 Requires-Dist: pydantic
 Requires-Dist: python-dotenv
 Requires-Dist: requests
@@ -249,6 +250,7 @@ Provides-Extra: monitoring
 Requires-Dist: newrelic>=8.8.0; extra == "monitoring"
 Provides-Extra: typing
 Requires-Dist: types-PyYAML; extra == "typing"
+Requires-Dist: types-aiofiles; extra == "typing"
 Requires-Dist: types-requests; extra == "typing"
 Requires-Dist: types-tqdm; extra == "typing"
 Dynamic: license-file
futurehouse_client-0.4.3.dist-info/RECORD
ADDED
@@ -0,0 +1,23 @@
+futurehouse_client/__init__.py,sha256=PvFTkocA-hobsWoDEBEdrUgLIbuVbDs_0nvMdImJmHk,707
+futurehouse_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+futurehouse_client/version.py,sha256=bmI9ViMEsJ1Rjce-6ExwiNh2B7sZKTyBkze4k8NsTrU,704
+futurehouse_client/clients/__init__.py,sha256=-HXNj-XJ3LRO5XM6MZ709iPs29YpApss0Q2YYg1qMZw,280
+futurehouse_client/clients/data_storage_methods.py,sha256=VESdX0J_frITd0QAjQ5UMPiqYvpToo0ooDCrS-U4uH8,99535
+futurehouse_client/clients/job_client.py,sha256=b5gpzulZpxpv9R337r3UKItnMdtd6CGlI1sV3_VQJso,13985
+futurehouse_client/clients/rest_client.py,sha256=RdyFEipvADDCHyY5XFy565IoL9-N1myJjF0G8x2wlK8,103183
+futurehouse_client/models/__init__.py,sha256=0YlzKGymbY1g4cXxnUc0BUnthTkVBf12bCZlGUcMQqk,701
+futurehouse_client/models/app.py,sha256=UUg17I3zk6cH_7mrdojHGYvQfm_SeDkuUxsPlRyIYz0,31895
+futurehouse_client/models/client.py,sha256=n4HD0KStKLm6Ek9nL9ylP-bkK10yzAaD1uIDF83Qp_A,1828
+futurehouse_client/models/data_storage_methods.py,sha256=GS1FbuMsUJSh7Evjt86vOri-95hfiLyASBS1xG7erNk,12793
+futurehouse_client/models/rest.py,sha256=Fqw0_ypULzd7IV93PKooSG9W5_g7fGFsdW9jNVVImHA,4514
+futurehouse_client/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+futurehouse_client/utils/auth.py,sha256=tgWELjKfg8eWme_qdcRmc8TjQN9DVZuHHaVXZNHLchk,2960
+futurehouse_client/utils/general.py,sha256=PIkGLCSA3kUvc6mwR-prEB7YnMdKILOIm6cPowSZzzs,2532
+futurehouse_client/utils/module_utils.py,sha256=aFyd-X-pDARXz9GWpn8SSViUVYdSbuy9vSkrzcVIaGI,4955
+futurehouse_client/utils/monitoring.py,sha256=UjRlufe67kI3VxRHOd5fLtJmlCbVA2Wqwpd4uZhXkQM,8728
+futurehouse_client/utils/world_model_tools.py,sha256=v2krZGrco0ur2a_pcRMtnQL05SxlIoBXuJ5R1JkQNws,2921
+futurehouse_client-0.4.3.dist-info/licenses/LICENSE,sha256=oQ9ZHjUi-_6GfP3gs14FlPb0OlGwE1QCCKFGnJ4LD2I,11341
+futurehouse_client-0.4.3.dist-info/METADATA,sha256=_1Vt3hXrwsJFgg6r5PyI8W3A4dCLj0Q8uAUtI5SW6L0,27063
+futurehouse_client-0.4.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+futurehouse_client-0.4.3.dist-info/top_level.txt,sha256=TRuLUCt_qBnggdFHCX4O_BoCu1j2X43lKfIZC-ElwWY,19
+futurehouse_client-0.4.3.dist-info/RECORD,,
futurehouse_client-0.4.2.dev274.dist-info/RECORD
DELETED
@@ -1,23 +0,0 @@
-futurehouse_client/__init__.py,sha256=PvFTkocA-hobsWoDEBEdrUgLIbuVbDs_0nvMdImJmHk,707
-futurehouse_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-futurehouse_client/version.py,sha256=Rysz_q8l0T_Q0y2ULuxn71ApOfUlXV-jxY5p3loPl1E,721
-futurehouse_client/clients/__init__.py,sha256=-HXNj-XJ3LRO5XM6MZ709iPs29YpApss0Q2YYg1qMZw,280
-futurehouse_client/clients/data_storage_methods.py,sha256=wfvN3GdS0R8IT_b_vizNny6nADbE7ZTVIVAE6MrA5iE,71558
-futurehouse_client/clients/job_client.py,sha256=b5gpzulZpxpv9R337r3UKItnMdtd6CGlI1sV3_VQJso,13985
-futurehouse_client/clients/rest_client.py,sha256=zSQfvS63yzzCnbhvwmJk8Pkyn1442cQNthpIE7Ebd5g,100498
-futurehouse_client/models/__init__.py,sha256=0YlzKGymbY1g4cXxnUc0BUnthTkVBf12bCZlGUcMQqk,701
-futurehouse_client/models/app.py,sha256=UUg17I3zk6cH_7mrdojHGYvQfm_SeDkuUxsPlRyIYz0,31895
-futurehouse_client/models/client.py,sha256=WFD1ddR0O7nD1ErqcJ-kt_miIW22KP6IDOSkaSdVZ8M,1716
-futurehouse_client/models/data_storage_methods.py,sha256=49YXPToxi6AzujtEnIo08GbPh_uHrwaCsvFh9cARArE,12377
-futurehouse_client/models/rest.py,sha256=x0qp8ut628fNBwB29ZJzSsWXdpSsIedsUS-Xq5ctL7c,3671
-futurehouse_client/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-futurehouse_client/utils/auth.py,sha256=tgWELjKfg8eWme_qdcRmc8TjQN9DVZuHHaVXZNHLchk,2960
-futurehouse_client/utils/general.py,sha256=Gxy8JJ2g6nO-gphf_kHAlkowb0eP_DqD4MSF58IXExE,1592
-futurehouse_client/utils/module_utils.py,sha256=aFyd-X-pDARXz9GWpn8SSViUVYdSbuy9vSkrzcVIaGI,4955
-futurehouse_client/utils/monitoring.py,sha256=UjRlufe67kI3VxRHOd5fLtJmlCbVA2Wqwpd4uZhXkQM,8728
-futurehouse_client/utils/world_model_tools.py,sha256=Ctiy-EfK7EXrjmKO_nI6V5VhOJyHKWc0sKwa8Q0HAAo,2292
-futurehouse_client-0.4.2.dev274.dist-info/licenses/LICENSE,sha256=oQ9ZHjUi-_6GfP3gs14FlPb0OlGwE1QCCKFGnJ4LD2I,11341
-futurehouse_client-0.4.2.dev274.dist-info/METADATA,sha256=ZFyppyLGspK7Iz_VovFU5imp5ePeipB2fM-JS0X4x-g,26987
-futurehouse_client-0.4.2.dev274.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-futurehouse_client-0.4.2.dev274.dist-info/top_level.txt,sha256=TRuLUCt_qBnggdFHCX4O_BoCu1j2X43lKfIZC-ElwWY,19
-futurehouse_client-0.4.2.dev274.dist-info/RECORD,,
{futurehouse_client-0.4.2.dev274.dist-info → futurehouse_client-0.4.3.dist-info}/WHEEL
RENAMED
File without changes
{futurehouse_client-0.4.2.dev274.dist-info → futurehouse_client-0.4.3.dist-info}/licenses/LICENSE
RENAMED
File without changes
{futurehouse_client-0.4.2.dev274.dist-info → futurehouse_client-0.4.3.dist-info}/top_level.txt
RENAMED
File without changes