futurehouse-client 0.4.1.dev95__py3-none-any.whl → 0.4.2.dev11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -26,14 +26,21 @@ from httpx import (
     AsyncClient,
     Client,
     CloseError,
+    ConnectError,
+    ConnectTimeout,
     HTTPStatusError,
+    NetworkError,
+    ReadError,
+    ReadTimeout,
     RemoteProtocolError,
     codes,
 )
 from ldp.agent import AgentConfig
+from requests.exceptions import RequestException, Timeout
 from tenacity import (
     before_sleep_log,
     retry,
+    retry_if_exception_type,
     stop_after_attempt,
     wait_exponential,
 )
@@ -41,7 +48,6 @@ from tqdm import tqdm as sync_tqdm
 from tqdm.asyncio import tqdm

 from futurehouse_client.clients import JobNames
-from futurehouse_client.clients.data_storage_methods import DataStorageMethods
 from futurehouse_client.models.app import (
     AuthType,
     JobDeploymentConfig,
@@ -62,10 +68,7 @@ from futurehouse_client.models.rest import (
     WorldModelResponse,
 )
 from futurehouse_client.utils.auth import RefreshingJWT
-from futurehouse_client.utils.general import (
-    create_retry_if_connection_error,
-    gather_with_concurrency,
-)
+from futurehouse_client.utils.general import gather_with_concurrency
 from futurehouse_client.utils.module_utils import (
     OrganizationSelector,
     fetch_environment_function_docstring,
@@ -157,15 +160,28 @@ class FileUploadError(RestClientError):
     """Raised when there's an error uploading a file."""


-retry_if_connection_error = create_retry_if_connection_error(FileUploadError)
+retry_if_connection_error = retry_if_exception_type((
+    # From requests
+    Timeout,
+    ConnectionError,
+    RequestException,
+    # From httpx
+    ConnectError,
+    ConnectTimeout,
+    ReadTimeout,
+    ReadError,
+    NetworkError,
+    RemoteProtocolError,
+    CloseError,
+    FileUploadError,
+))


 DEFAULT_AGENT_TIMEOUT: int = 2400  # seconds


 # pylint: disable=too-many-public-methods
-class RestClient(DataStorageMethods):
-    REQUEST_TIMEOUT: ClassVar[float] = 30.0  # sec - for general API calls
-    FILE_UPLOAD_TIMEOUT: ClassVar[float] = 600.0  # 10 minutes - for file uploads
+class RestClient:
+    REQUEST_TIMEOUT: ClassVar[float] = 30.0  # sec
     MAX_RETRY_ATTEMPTS: ClassVar[int] = 3
     RETRY_MULTIPLIER: ClassVar[int] = 1
     MAX_RETRY_WAIT: ClassVar[int] = 10
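For orientation, the predicate defined above is what the client passes to tenacity's `@retry` decorators later in this diff. A minimal sketch of that pattern, with a hypothetical function and a reduced exception tuple rather than the client's full list:

```python
from httpx import ConnectError, ConnectTimeout
from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential

# Reduced stand-in for the predicate above: retry only on transport-level failures.
retry_if_connection_error = retry_if_exception_type((ConnectError, ConnectTimeout, ConnectionError))


@retry(
    stop=stop_after_attempt(3),
    wait=wait_exponential(multiplier=1, max=10),
    retry=retry_if_connection_error,
)
def ping_service() -> str:
    # Hypothetical call; any exception in the tuple above triggers another attempt.
    raise ConnectError("transient network failure")
```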
@@ -223,35 +239,11 @@ class RestClient(DataStorageMethods):
         """Authenticated HTTP client for multipart uploads."""
         return cast(Client, self.get_client(None, authenticated=True))

-    @property
-    def file_upload_client(self) -> Client:
-        """Authenticated HTTP client with extended timeout for file uploads."""
-        return cast(
-            Client,
-            self.get_client(
-                "application/json", authenticated=True, timeout=self.FILE_UPLOAD_TIMEOUT
-            ),
-        )
-
-    @property
-    def async_file_upload_client(self) -> AsyncClient:
-        """Authenticated async HTTP client with extended timeout for file uploads."""
-        return cast(
-            AsyncClient,
-            self.get_client(
-                "application/json",
-                authenticated=True,
-                async_client=True,
-                timeout=self.FILE_UPLOAD_TIMEOUT,
-            ),
-        )
-
     def get_client(
         self,
         content_type: str | None = "application/json",
         authenticated: bool = True,
         async_client: bool = False,
-        timeout: float | None = None,
     ) -> Client | AsyncClient:
         """Return a cached HTTP client or create one if needed.

@@ -259,13 +251,12 @@ class RestClient(DataStorageMethods):
             content_type: The desired content type header. Use None for multipart uploads.
             authenticated: Whether the client should include authentication.
             async_client: Whether to use an async client.
-            timeout: Custom timeout in seconds. Uses REQUEST_TIMEOUT if not provided.

         Returns:
             An HTTP client configured with the appropriate headers.
         """
-        client_timeout = timeout or self.REQUEST_TIMEOUT
-        key = f"{content_type or 'multipart'}_{authenticated}_{async_client}_{client_timeout}"
+        # Create a composite key based on content type and auth flag
+        key = f"{content_type or 'multipart'}_{authenticated}_{async_client}"

         if key not in self._clients:
             headers = copy.deepcopy(self.headers)
@@ -291,14 +282,14 @@ class RestClient(DataStorageMethods):
                 AsyncClient(
                     base_url=self.base_url,
                     headers=headers,
-                    timeout=client_timeout,
+                    timeout=self.REQUEST_TIMEOUT,
                     auth=auth,
                 )
                 if async_client
                 else Client(
                     base_url=self.base_url,
                     headers=headers,
-                    timeout=client_timeout,
+                    timeout=self.REQUEST_TIMEOUT,
                     auth=auth,
                 )
             )
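With the timeout parameter gone, the cache key varies only by content type, auth flag, and sync/async mode, and every cached client is built with REQUEST_TIMEOUT. A simplified sketch of this caching pattern, using hypothetical names rather than the client's actual attributes:

```python
import httpx


class CachedClientFactory:
    """Illustrative only: one cached httpx client per (content type, auth, async) combination."""

    REQUEST_TIMEOUT = 30.0  # seconds

    def __init__(self, base_url: str) -> None:
        self.base_url = base_url
        self._clients: dict[str, httpx.Client | httpx.AsyncClient] = {}

    def get_client(
        self,
        content_type: str | None = "application/json",
        authenticated: bool = True,
        async_client: bool = False,
    ) -> httpx.Client | httpx.AsyncClient:
        # Composite key mirrors the one in the diff above; no timeout component anymore.
        key = f"{content_type or 'multipart'}_{authenticated}_{async_client}"
        if key not in self._clients:
            client_cls = httpx.AsyncClient if async_client else httpx.Client
            self._clients[key] = client_cls(
                base_url=self.base_url, timeout=self.REQUEST_TIMEOUT
            )
        return self._clients[key]
```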
@@ -1630,16 +1621,17 @@ class RestClient(DataStorageMethods):
             A list of world model names.
         """
         try:
+            # Use the consolidated endpoint with search parameters
             response = self.client.get(
-                "/v0.1/world-models/search/",
+                "/v0.1/world-models",
                 params={
-                    "query": query,
+                    "q": query,
                     "size": size,
-                    "total_search_size": total_search_size,
                     "search_all_versions": search_all_versions,
                 },
             )
             response.raise_for_status()
+            # The new endpoint returns a list of models directly
             return response.json()
         except HTTPStatusError as e:
             raise WorldModelFetchError(
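For reference, the consolidated search now reduces to a single GET with `q`, `size`, and `search_all_versions`. A hedged sketch of the equivalent raw request; the base URL and token are placeholders, since the real client injects its own base_url and auth:

```python
import httpx

# Placeholder endpoint and credentials, for illustration only.
with httpx.Client(
    base_url="https://api.example.org",
    headers={"Authorization": "Bearer <token>"},
) as client:
    response = client.get(
        "/v0.1/world-models",
        params={"q": "enzyme kinetics", "size": 10, "search_all_versions": False},
    )
    response.raise_for_status()
    models = response.json()  # the consolidated endpoint returns the list of models directly
```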
@@ -1754,22 +1746,19 @@ class RestClient(DataStorageMethods):
         wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
         retry=retry_if_connection_error,
     )
-    def get_project_by_name(self, name: str) -> UUID:
+    def get_project_by_name(self, name: str, limit: int = 2) -> UUID | list[UUID]:
         """Get a project UUID by name.

         Args:
             name: The name of the project to find
+            limit: Maximum number of projects to return

         Returns:
-            UUID of the project as a string
-
-        Raises:
-            ProjectError: If no project is found, multiple projects are found, or there's an error
+            UUID of the project as a string or a list of UUIDs if multiple projects are found
         """
         try:
-            # Get projects filtered by name (backend now filters by name and owner)
             response = self.client.get(
-                "/v0.1/projects", params={"limit": 2, "name": name}
+                "/v0.1/projects", params={"limit": limit, "name": name}
             )
             response.raise_for_status()
             projects = response.json()
@@ -1782,32 +1771,33 @@ class RestClient(DataStorageMethods):
             if len(projects) == 0:
                 raise ProjectError(f"No project found with name '{name}'")
             if len(projects) > 1:
-                raise ProjectError(
+                logger.warning(
                     f"Multiple projects found with name '{name}'. Found {len(projects)} projects."
                 )

-            return UUID(projects[0]["id"])
+            ids = [UUID(project["id"]) for project in projects]
+            return ids[0] if len(ids) == 1 else ids

     @retry(
         stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
         wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
         retry=retry_if_connection_error,
     )
-    async def aget_project_by_name(self, name: str) -> UUID:
+    async def aget_project_by_name(
+        self, name: str, limit: int = 2
+    ) -> UUID | list[UUID]:
         """Asynchronously get a project UUID by name.

         Args:
             name: The name of the project to find
+            limit: Maximum number of projects to return

         Returns:
-            UUID of the project as a string
-
-        Raises:
-            ProjectError: If no project is found, multiple projects are found, or there's an error
+            UUID of the project as a string or a list of UUIDs if multiple projects are found
         """
         try:
             response = await self.async_client.get(
-                "/v0.1/projects", params={"limit": 2, "name": name}
+                "/v0.1/projects", params={"limit": limit, "name": name}
             )
             response.raise_for_status()
             projects = response.json()
@@ -1816,11 +1806,12 @@ class RestClient(DataStorageMethods):
             if len(projects) == 0:
                 raise ProjectError(f"No project found with name '{name}'")
             if len(projects) > 1:
-                raise ProjectError(
+                logger.warning(
                     f"Multiple projects found with name '{name}'. Found {len(projects)} projects."
                 )

-            return UUID(projects[0]["id"])
+            ids = [UUID(project["id"]) for project in projects]
+            return ids[0] if len(ids) == 1 else ids

     @retry(
         stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
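Because both methods can now return either a single UUID or a list of UUIDs when names collide, callers should branch on the result type. A hedged usage sketch; the client class and constructor arguments are assumptions about the package's public entry point, not taken from this diff:

```python
from uuid import UUID

from futurehouse_client import FutureHouseClient  # assumed public entry point

client = FutureHouseClient(api_key="...")  # placeholder credentials
result = client.get_project_by_name("my-project", limit=5)

if isinstance(result, UUID):
    project_id = result      # exactly one project matched
else:
    project_id = result[0]   # multiple matches: choose deliberately; ordering is server-defined
```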
@@ -27,10 +27,7 @@ if TYPE_CHECKING:
 MAX_CROW_JOB_RUN_TIMEOUT = 60 * 60 * 24  # 24 hours in sec
 MIN_CROW_JOB_RUN_TIMEOUT = 0  # sec

-
-class PythonVersion(StrEnum):
-    V3_11 = "3.11"
-    V3_12 = "3.12"
+DEFAULT_PYTHON_VERSION_USED_FOR_JOB_BUILDS = "3.13"


 class AuthType(StrEnum):
@@ -420,9 +417,9 @@ class JobDeploymentConfig(BaseModel):
         description="The configuration for the cloud run container.",
     )

-    python_version: PythonVersion = Field(
-        default=PythonVersion.V3_12,
-        description="The python version your docker image should build with.",
+    python_version: str = Field(
+        default=DEFAULT_PYTHON_VERSION_USED_FOR_JOB_BUILDS,
+        description="The python version your docker image should build with (e.g., '3.11', '3.12', '3.13').",
     )

     agent: Agent | AgentConfig | str = Field(
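The field is now a free-form string defaulting to "3.13" instead of a two-member enum, so any version string passes validation. A self-contained sketch of just this field; JobDeploymentConfig's other fields are omitted, so this model is illustrative rather than the real one:

```python
from pydantic import BaseModel, Field

DEFAULT_PYTHON_VERSION_USED_FOR_JOB_BUILDS = "3.13"


class DeploymentConfigSketch(BaseModel):
    """Stand-in for the python_version portion of JobDeploymentConfig."""

    python_version: str = Field(
        default=DEFAULT_PYTHON_VERSION_USED_FOR_JOB_BUILDS,
        description="The python version your docker image should build with (e.g., '3.11', '3.12', '3.13').",
    )


print(DeploymentConfigSketch().python_version)                       # "3.13"
print(DeploymentConfigSketch(python_version="3.12").python_version)  # any string now validates
```

The trade-off of the looser type is that an unsupported or misspelled version is no longer rejected by pydantic and would only surface when the image build runs.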
@@ -27,17 +27,13 @@ class InitialState(BaseState):

 class ASVState(BaseState, Generic[T]):
     action: OpResult[T] = Field()
-    next_state: Any = Field()
+    next_agent_state: Any = Field()
     value: float = Field()

     @field_serializer("action")
     def serialize_action(self, action: OpResult[T]) -> dict:
         return action.to_dict()

-    @field_serializer("next_state")
-    def serialize_next_state(self, state: Any) -> str:
-        return str(state)
-

 class EnvResetState(BaseState):
     observations: list[Message] = Field()
@@ -2,45 +2,11 @@ import asyncio
 from collections.abc import Awaitable, Iterable
 from typing import TypeVar

-from httpx import (
-    CloseError,
-    ConnectError,
-    ConnectTimeout,
-    NetworkError,
-    ReadError,
-    ReadTimeout,
-    RemoteProtocolError,
-)
-from requests.exceptions import RequestException, Timeout
-from tenacity import retry_if_exception_type
 from tqdm.asyncio import tqdm

 T = TypeVar("T")


-_BASE_CONNECTION_ERRORS = (
-    # From requests
-    Timeout,
-    ConnectionError,
-    RequestException,
-    # From httpx
-    ConnectError,
-    ConnectTimeout,
-    ReadTimeout,
-    ReadError,
-    NetworkError,
-    RemoteProtocolError,
-    CloseError,
-)
-
-retry_if_connection_error = retry_if_exception_type(_BASE_CONNECTION_ERRORS)
-
-
-def create_retry_if_connection_error(*additional_exceptions):
-    """Create a retry condition with base connection errors plus additional exceptions."""
-    return retry_if_exception_type(_BASE_CONNECTION_ERRORS + additional_exceptions)
-
-
 async def gather_with_concurrency(
     n: int | asyncio.Semaphore, coros: Iterable[Awaitable[T]], progress: bool = False
 ) -> list[T]:
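After this change, general.py exposes only gather_with_concurrency. A hedged usage sketch based on the signature visible above; the fetch coroutine is a placeholder standing in for real client calls:

```python
import asyncio

from futurehouse_client.utils.general import gather_with_concurrency


async def fetch(i: int) -> int:
    # Placeholder coroutine; in practice this would be an API call.
    await asyncio.sleep(0.1)
    return i * i


async def main() -> None:
    # At most 3 coroutines run concurrently; pass progress=True for a tqdm bar.
    results = await gather_with_concurrency(3, [fetch(i) for i in range(10)])
    print(results)


asyncio.run(main())
```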
@@ -46,16 +46,17 @@ class WorldModelTools:
         return WorldModelTools._get_client().create_world_model(world_model)

     @staticmethod
-    def search_world_models(query: str) -> list[str]:
+    def search_world_models(query: str, size: int = 10) -> list[str]:
         """Search for world models using a text query.

         Args:
             query: The search query string to match against world model content.
+            size: The number of results to return (default: 10).

         Returns:
             list[str]: A list of world model IDs that match the search query.
         """
-        return WorldModelTools._get_client().search_world_models(query, size=1)
+        return WorldModelTools._get_client().search_world_models(query, size=size)


 create_world_model_tool = Tool.from_function(WorldModelTools.create_world_model)
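A hedged usage sketch of the widened tool signature; it assumes WorldModelTools._get_client() can construct an authenticated client from prior configuration, and the query string is illustrative:

```python
from futurehouse_client.utils.world_model_tools import WorldModelTools

# Previously capped at size=1; now the caller controls the result count.
model_ids = WorldModelTools.search_world_models("protein folding kinetics", size=5)
print(model_ids)  # up to 5 matching world model IDs
```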
@@ -1,7 +1,14 @@
 # file generated by setuptools-scm
 # don't change, don't track in version control

-__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+__all__ = [
+    "__version__",
+    "__version_tuple__",
+    "version",
+    "version_tuple",
+    "__commit_id__",
+    "commit_id",
+]

 TYPE_CHECKING = False
 if TYPE_CHECKING:
@@ -9,13 +16,19 @@ if TYPE_CHECKING:
     from typing import Union

     VERSION_TUPLE = Tuple[Union[int, str], ...]
+    COMMIT_ID = Union[str, None]
 else:
     VERSION_TUPLE = object
+    COMMIT_ID = object

 version: str
 __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
+commit_id: COMMIT_ID
+__commit_id__: COMMIT_ID

-__version__ = version = '0.4.1.dev95'
-__version_tuple__ = version_tuple = (0, 4, 1, 'dev95')
+__version__ = version = '0.4.2.dev11'
+__version_tuple__ = version_tuple = (0, 4, 2, 'dev11')
+
+__commit_id__ = commit_id = None
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: futurehouse-client
-Version: 0.4.1.dev95
+Version: 0.4.2.dev11
 Summary: A client for interacting with endpoints of the FutureHouse service.
 Author-email: FutureHouse technical staff <hello@futurehouse.org>
 License: Apache License
@@ -213,22 +213,18 @@ Classifier: Programming Language :: Python
 Requires-Python: <3.14,>=3.11
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: aiofiles
 Requires-Dist: cloudpickle
 Requires-Dist: fhaviary
-Requires-Dist: google-resumable-media[aiohttp]
 Requires-Dist: httpx
 Requires-Dist: ldp>=0.22.0
 Requires-Dist: litellm
 Requires-Dist: pydantic
 Requires-Dist: python-dotenv
-Requires-Dist: requests
 Requires-Dist: tenacity
 Requires-Dist: tqdm>=4.62
 Provides-Extra: dev
 Requires-Dist: black; extra == "dev"
 Requires-Dist: futurehouse-client[monitoring,typing]; extra == "dev"
-Requires-Dist: ipykernel; extra == "dev"
 Requires-Dist: jupyter; extra == "dev"
 Requires-Dist: jupyterlab; extra == "dev"
 Requires-Dist: mypy; extra == "dev"
@@ -248,7 +244,6 @@ Requires-Dist: setuptools_scm; extra == "dev"
 Provides-Extra: monitoring
 Requires-Dist: newrelic>=8.8.0; extra == "monitoring"
 Provides-Extra: typing
-Requires-Dist: types-PyYAML; extra == "typing"
 Requires-Dist: types-requests; extra == "typing"
 Requires-Dist: types-tqdm; extra == "typing"
 Dynamic: license-file
@@ -0,0 +1,21 @@
+futurehouse_client/__init__.py,sha256=PvFTkocA-hobsWoDEBEdrUgLIbuVbDs_0nvMdImJmHk,707
+futurehouse_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+futurehouse_client/version.py,sha256=c_JzNS0ouORBU9NbAiOBOJBqK2VWGvvGLFYyQtq8WEs,719
+futurehouse_client/clients/__init__.py,sha256=-HXNj-XJ3LRO5XM6MZ709iPs29YpApss0Q2YYg1qMZw,280
+futurehouse_client/clients/job_client.py,sha256=D51_qTxya6g5Wfg_ZfJdP031TV_YDJeXkGMiYAJ1qRc,11962
+futurehouse_client/clients/rest_client.py,sha256=OsdskJ1OT8SdDJQHG4bbt2_sVZMpHMtbG_x78cdV8ac,99790
+futurehouse_client/models/__init__.py,sha256=kQ4R7VEuRxO0IQEW_sk9CndBL7zzl8rUKI24ddyYLM0,647
+futurehouse_client/models/app.py,sha256=UUg17I3zk6cH_7mrdojHGYvQfm_SeDkuUxsPlRyIYz0,31895
+futurehouse_client/models/client.py,sha256=WFD1ddR0O7nD1ErqcJ-kt_miIW22KP6IDOSkaSdVZ8M,1716
+futurehouse_client/models/rest.py,sha256=ybelLsyTsKYud7DYUCF0sFF6u81bl8WmS_wWAnbX-0M,3382
+futurehouse_client/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+futurehouse_client/utils/auth.py,sha256=tgWELjKfg8eWme_qdcRmc8TjQN9DVZuHHaVXZNHLchk,2960
+futurehouse_client/utils/general.py,sha256=A_rtTiYW30ELGEZlWCIArO7q1nEmqi8hUlmBRYkMQ_c,767
+futurehouse_client/utils/module_utils.py,sha256=aFyd-X-pDARXz9GWpn8SSViUVYdSbuy9vSkrzcVIaGI,4955
+futurehouse_client/utils/monitoring.py,sha256=UjRlufe67kI3VxRHOd5fLtJmlCbVA2Wqwpd4uZhXkQM,8728
+futurehouse_client/utils/world_model_tools.py,sha256=Ctiy-EfK7EXrjmKO_nI6V5VhOJyHKWc0sKwa8Q0HAAo,2292
+futurehouse_client-0.4.2.dev11.dist-info/licenses/LICENSE,sha256=oQ9ZHjUi-_6GfP3gs14FlPb0OlGwE1QCCKFGnJ4LD2I,11341
+futurehouse_client-0.4.2.dev11.dist-info/METADATA,sha256=xb_dj1oOZU-vjhdpMUnRrlLzbqfkaJCA-ZNa4PUE2LQ,26803
+futurehouse_client-0.4.2.dev11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+futurehouse_client-0.4.2.dev11.dist-info/top_level.txt,sha256=TRuLUCt_qBnggdFHCX4O_BoCu1j2X43lKfIZC-ElwWY,19
+futurehouse_client-0.4.2.dev11.dist-info/RECORD,,