futurehouse-client 0.4.2.dev11__py3-none-any.whl → 0.4.2.dev274__py3-none-any.whl

This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions exactly as they appear in the public registry.
@@ -19,6 +19,7 @@ from futurehouse_client.models.rest import (
19
19
  FinalEnvironmentRequest,
20
20
  StoreAgentStatePostRequest,
21
21
  StoreEnvironmentFrameRequest,
22
+ TrajectoryPatchRequest,
22
23
  )
23
24
  from futurehouse_client.utils.monitoring import (
24
25
  external_trace,
@@ -318,3 +319,52 @@ class JobClient:
318
319
  f"Unexpected error storing environment frame for state {state_identifier}",
319
320
  )
320
321
  raise
322
+
323
+ async def patch_trajectory(
324
+ self,
325
+ public: bool | None = None,
326
+ shared_with: list[int] | None = None,
327
+ notification_enabled: bool | None = None,
328
+ notification_type: str | None = None,
329
+ min_estimated_time: float | None = None,
330
+ max_estimated_time: float | None = None,
331
+ ) -> None:
332
+ data = TrajectoryPatchRequest(
333
+ public=public,
334
+ shared_with=shared_with,
335
+ notification_enabled=notification_enabled,
336
+ notification_type=notification_type,
337
+ min_estimated_time=min_estimated_time,
338
+ max_estimated_time=max_estimated_time,
339
+ )
340
+ try:
341
+ async with httpx.AsyncClient(timeout=self.REQUEST_TIMEOUT) as client:
342
+ url = f"{self.base_uri}/v0.1/trajectories/{self.trajectory_id}"
343
+ headers = {
344
+ "Authorization": f"Bearer {self.oauth_jwt}",
345
+ "x-trajectory-id": self.trajectory_id,
346
+ }
347
+ response = await client.patch(
348
+ url=url,
349
+ json=data.model_dump(mode="json", exclude_none=True),
350
+ headers=headers,
351
+ )
352
+ response.raise_for_status()
353
+ logger.debug("Trajectory updated successfully")
354
+ except httpx.HTTPStatusError as e:
355
+ logger.exception(
356
+ "HTTP error while patching trajectory. "
357
+ f"Status code: {e.response.status_code}, "
358
+ f"Response: {e.response.text}",
359
+ )
360
+ except httpx.TimeoutException:
361
+ logger.exception(
362
+ f"Timeout while patching trajectory after {self.REQUEST_TIMEOUT}s",
363
+ )
364
+ raise
365
+ except httpx.NetworkError:
366
+ logger.exception("Network error while patching trajectory")
367
+ raise
368
+ except Exception:
369
+ logger.exception("Unexpected error while patching trajectory")
370
+ raise
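For reference, `patch_trajectory` only transmits the fields that are explicitly set, since the request body is dumped with `exclude_none=True`. A minimal usage sketch, assuming an already-authenticated `JobClient` bound to a trajectory and with illustrative field values:

```python
async def share_and_notify(job_client) -> None:
    """Sketch only: `job_client` is an existing, authenticated JobClient."""
    # Only the keyword arguments passed here end up in the PATCH body;
    # fields left as None are dropped before the request is sent.
    await job_client.patch_trajectory(
        public=True,
        shared_with=[42],             # hypothetical user id
        notification_enabled=True,
        notification_type="email",    # illustrative; accepted values are server-defined
    )


# Call with `await share_and_notify(job_client)` from async code,
# or drive it with asyncio.run(...) from a script.
```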
@@ -26,21 +26,14 @@ from httpx import (
26
26
  AsyncClient,
27
27
  Client,
28
28
  CloseError,
29
- ConnectError,
30
- ConnectTimeout,
31
29
  HTTPStatusError,
32
- NetworkError,
33
- ReadError,
34
- ReadTimeout,
35
30
  RemoteProtocolError,
36
31
  codes,
37
32
  )
38
33
  from ldp.agent import AgentConfig
39
- from requests.exceptions import RequestException, Timeout
40
34
  from tenacity import (
41
35
  before_sleep_log,
42
36
  retry,
43
- retry_if_exception_type,
44
37
  stop_after_attempt,
45
38
  wait_exponential,
46
39
  )
@@ -48,6 +41,7 @@ from tqdm import tqdm as sync_tqdm
48
41
  from tqdm.asyncio import tqdm
49
42
 
50
43
  from futurehouse_client.clients import JobNames
44
+ from futurehouse_client.clients.data_storage_methods import DataStorageMethods
51
45
  from futurehouse_client.models.app import (
52
46
  AuthType,
53
47
  JobDeploymentConfig,
@@ -68,7 +62,10 @@ from futurehouse_client.models.rest import (
68
62
  WorldModelResponse,
69
63
  )
70
64
  from futurehouse_client.utils.auth import RefreshingJWT
71
- from futurehouse_client.utils.general import gather_with_concurrency
65
+ from futurehouse_client.utils.general import (
66
+ create_retry_if_connection_error,
67
+ gather_with_concurrency,
68
+ )
72
69
  from futurehouse_client.utils.module_utils import (
73
70
  OrganizationSelector,
74
71
  fetch_environment_function_docstring,
@@ -160,28 +157,15 @@ class FileUploadError(RestClientError):
160
157
  """Raised when there's an error uploading a file."""
161
158
 
162
159
 
163
- retry_if_connection_error = retry_if_exception_type((
164
- # From requests
165
- Timeout,
166
- ConnectionError,
167
- RequestException,
168
- # From httpx
169
- ConnectError,
170
- ConnectTimeout,
171
- ReadTimeout,
172
- ReadError,
173
- NetworkError,
174
- RemoteProtocolError,
175
- CloseError,
176
- FileUploadError,
177
- ))
160
+ retry_if_connection_error = create_retry_if_connection_error(FileUploadError)
178
161
 
179
162
  DEFAULT_AGENT_TIMEOUT: int = 2400 # seconds
180
163
 
181
164
 
182
165
  # pylint: disable=too-many-public-methods
183
- class RestClient:
184
- REQUEST_TIMEOUT: ClassVar[float] = 30.0 # sec
166
+ class RestClient(DataStorageMethods):
167
+ REQUEST_TIMEOUT: ClassVar[float] = 30.0 # sec - for general API calls
168
+ FILE_UPLOAD_TIMEOUT: ClassVar[float] = 600.0 # 10 minutes - for file uploads
185
169
  MAX_RETRY_ATTEMPTS: ClassVar[int] = 3
186
170
  RETRY_MULTIPLIER: ClassVar[int] = 1
187
171
  MAX_RETRY_WAIT: ClassVar[int] = 10
@@ -239,11 +223,35 @@ class RestClient:
239
223
  """Authenticated HTTP client for multipart uploads."""
240
224
  return cast(Client, self.get_client(None, authenticated=True))
241
225
 
226
+ @property
227
+ def file_upload_client(self) -> Client:
228
+ """Authenticated HTTP client with extended timeout for file uploads."""
229
+ return cast(
230
+ Client,
231
+ self.get_client(
232
+ "application/json", authenticated=True, timeout=self.FILE_UPLOAD_TIMEOUT
233
+ ),
234
+ )
235
+
236
+ @property
237
+ def async_file_upload_client(self) -> AsyncClient:
238
+ """Authenticated async HTTP client with extended timeout for file uploads."""
239
+ return cast(
240
+ AsyncClient,
241
+ self.get_client(
242
+ "application/json",
243
+ authenticated=True,
244
+ async_client=True,
245
+ timeout=self.FILE_UPLOAD_TIMEOUT,
246
+ ),
247
+ )
248
+
242
249
  def get_client(
243
250
  self,
244
251
  content_type: str | None = "application/json",
245
252
  authenticated: bool = True,
246
253
  async_client: bool = False,
254
+ timeout: float | None = None,
247
255
  ) -> Client | AsyncClient:
248
256
  """Return a cached HTTP client or create one if needed.
249
257
 
@@ -251,12 +259,13 @@ class RestClient:
251
259
  content_type: The desired content type header. Use None for multipart uploads.
252
260
  authenticated: Whether the client should include authentication.
253
261
  async_client: Whether to use an async client.
262
+ timeout: Custom timeout in seconds. Uses REQUEST_TIMEOUT if not provided.
254
263
 
255
264
  Returns:
256
265
  An HTTP client configured with the appropriate headers.
257
266
  """
258
- # Create a composite key based on content type and auth flag
259
- key = f"{content_type or 'multipart'}_{authenticated}_{async_client}"
267
+ client_timeout = timeout or self.REQUEST_TIMEOUT
268
+ key = f"{content_type or 'multipart'}_{authenticated}_{async_client}_{client_timeout}"
260
269
 
261
270
  if key not in self._clients:
262
271
  headers = copy.deepcopy(self.headers)
@@ -282,14 +291,14 @@ class RestClient:
282
291
  AsyncClient(
283
292
  base_url=self.base_url,
284
293
  headers=headers,
285
- timeout=self.REQUEST_TIMEOUT,
294
+ timeout=client_timeout,
286
295
  auth=auth,
287
296
  )
288
297
  if async_client
289
298
  else Client(
290
299
  base_url=self.base_url,
291
300
  headers=headers,
292
- timeout=self.REQUEST_TIMEOUT,
301
+ timeout=client_timeout,
293
302
  auth=auth,
294
303
  )
295
304
  )
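With the added `timeout` parameter now participating in the cache key, the default 30 s client and the 600 s upload client are cached independently and can coexist. A rough sketch of the resulting behaviour, assuming `rc` is a constructed, authenticated `RestClient`:

```python
def demo_client_cache(rc) -> None:
    """Sketch only: `rc` is an existing, authenticated RestClient."""
    # Falls back to REQUEST_TIMEOUT (30.0 s) when no timeout is passed.
    default_client = rc.get_client("application/json", authenticated=True)

    # Same content type and auth, but keyed under FILE_UPLOAD_TIMEOUT (600.0 s),
    # so it never clobbers the default client.
    upload_client = rc.file_upload_client

    # Cache keys differ only in the trailing timeout component, e.g.
    #   "application/json_True_False_30.0" vs "application/json_True_False_600.0".
    assert rc.get_client("application/json", authenticated=True) is default_client
    assert upload_client is not default_client
```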
@@ -13,7 +13,7 @@ from .app import (
13
13
  TaskResponse,
14
14
  TaskResponseVerbose,
15
15
  )
16
- from .rest import WorldModel, WorldModelResponse
16
+ from .rest import TrajectoryPatchRequest, WorldModel, WorldModelResponse
17
17
 
18
18
  __all__ = [
19
19
  "AuthType",
@@ -29,6 +29,7 @@ __all__ = [
29
29
  "TaskRequest",
30
30
  "TaskResponse",
31
31
  "TaskResponseVerbose",
32
+ "TrajectoryPatchRequest",
32
33
  "WorldModel",
33
34
  "WorldModelResponse",
34
35
  ]
@@ -0,0 +1,341 @@
1
+ import contextlib
2
+ from datetime import datetime
3
+ from enum import StrEnum, auto
4
+ from os import PathLike
5
+ from pathlib import Path
6
+ from typing import Any
7
+ from uuid import UUID
8
+
9
+ from pydantic import BaseModel, Field, JsonValue
10
+
11
+
12
+ class DataStorageEntry(BaseModel):
13
+ """Model representing a data storage entry."""
14
+
15
+ id: UUID = Field(description="Unique identifier for the data storage entry")
16
+ name: str = Field(description="Name of the data storage entry")
17
+ description: str | None = Field(
18
+ default=None, description="Description of the data storage entry"
19
+ )
20
+ content: str | None = Field(
21
+ default=None, description="Content of the data storage entry"
22
+ )
23
+ embedding: list[float] | None = Field(
24
+ default=None, description="Embedding vector for the content"
25
+ )
26
+ is_collection: bool = Field(
27
+ default=False, description="Whether this entry is a collection"
28
+ )
29
+ tags: list[str] | None = Field(
30
+ default=None,
31
+ description="List of tags associated with the data storage entry",
32
+ )
33
+ parent_id: UUID | None = Field(
34
+ default=None,
35
+ description="ID of the parent entry if this is a sub-entry for hierarchical storage",
36
+ )
37
+ project_id: UUID | None = Field(
38
+ default=None,
39
+ description="ID of the project this data storage entry belongs to",
40
+ )
41
+ dataset_id: UUID | None = Field(
42
+ default=None,
43
+ description="ID of the dataset this entry belongs to",
44
+ )
45
+ path: str | None = Field(
46
+ default=None,
47
+ description="Path in the storage system where this entry is located, if a file.",
48
+ )
49
+ bigquery_schema: Any | None = Field(
50
+ default=None, description="Target BigQuery schema for the data storage entry"
51
+ )
52
+ user_id: str = Field(description="ID of the user who created this entry")
53
+ created_at: datetime = Field(description="Timestamp when the entry was created")
54
+ modified_at: datetime = Field(
55
+ description="Timestamp when the entry was last updated"
56
+ )
57
+
58
+
59
+ class DataStorageType(StrEnum):
60
+ BIGQUERY = auto()
61
+ GCS = auto()
62
+ PG_TABLE = auto()
63
+ RAW_CONTENT = auto()
64
+ ELASTIC_SEARCH = auto()
65
+
66
+
67
+ class DataContentType(StrEnum):
68
+ BQ_DATASET = auto()
69
+ BQ_TABLE = auto()
70
+ TEXT = auto()
71
+ TEXT_W_EMBEDDINGS = auto()
72
+ DIRECTORY = auto()
73
+ FILE = auto()
74
+ INDEX = auto()
75
+ INDEX_W_EMBEDDINGS = auto()
76
+
77
+
78
+ class DataStorageLocationPayload(BaseModel):
79
+ storage_type: DataStorageType
80
+ content_type: DataContentType
81
+ content_schema: JsonValue | None = None
82
+ metadata: JsonValue | None = None
83
+ location: str | None = None
84
+
85
+
86
+ class DataStorageLocationDetails(BaseModel):
87
+ """Model representing the location details within a DataStorageLocations object."""
88
+
89
+ storage_type: str = Field(description="Type of storage (e.g., 'gcs', 'pg_table')")
90
+ content_type: str = Field(description="Type of content stored")
91
+ content_schema: JsonValue | None = Field(default=None, description="Content schema")
92
+ metadata: JsonValue | None = Field(default=None, description="Location metadata")
93
+ location: str | None = Field(
94
+ default=None, description="Location path or identifier"
95
+ )
96
+
97
+
98
+ class DataStorageLocations(BaseModel):
99
+ """Model representing storage locations for a data storage entry."""
100
+
101
+ id: UUID = Field(description="Unique identifier for the storage locations")
102
+ data_storage_id: UUID = Field(description="ID of the associated data storage entry")
103
+ storage_config: DataStorageLocationDetails = Field(
104
+ description="Storage configuration details"
105
+ )
106
+ created_at: datetime = Field(description="Timestamp when the location was created")
107
+
108
+
109
+ class DataStorageResponse(BaseModel):
110
+ """Response model for data storage operations."""
111
+
112
+ data_storage: DataStorageEntry = Field(description="The created data storage entry")
113
+ storage_location: DataStorageLocations = Field(
114
+ description="Storage location for this data entry"
115
+ )
116
+ signed_url: str | None = Field(
117
+ default=None,
118
+ description="Signed URL for uploading/downloading the file to/from GCS",
119
+ )
120
+
121
+
122
+ class DataStorageRequestPayload(BaseModel):
123
+ """Payload for creating a data storage entry."""
124
+
125
+ name: str = Field(description="Name of the data storage entry")
126
+ description: str | None = Field(
127
+ default=None, description="Description of the data storage entry"
128
+ )
129
+ content: str | None = Field(
130
+ default=None, description="Content of the data storage entry"
131
+ )
132
+ is_collection: bool = Field(
133
+ default=False, description="Whether this entry is a collection"
134
+ )
135
+ parent_id: UUID | None = Field(
136
+ default=None, description="ID of the parent entry for hierarchical storage"
137
+ )
138
+ project_id: UUID | None = Field(
139
+ default=None,
140
+ description="ID of the project this data storage entry belongs to",
141
+ )
142
+ dataset_id: UUID | None = Field(
143
+ default=None,
144
+ description="ID of existing dataset to add entry to, or None to create new dataset",
145
+ )
146
+ path: PathLike | str | None = Field(
147
+ default=None,
148
+ description="Path to store in the GCS bucket.",
149
+ )
150
+ existing_location: DataStorageLocationPayload | None = Field(
151
+ default=None, description="Target storage metadata"
152
+ )
153
+
154
+
155
+ class ManifestEntry(BaseModel):
156
+ """Model representing a single entry in a manifest file."""
157
+
158
+ description: str | None = Field(
159
+ default=None, description="Description of the file or directory"
160
+ )
161
+ metadata: dict[str, Any] | None = Field(
162
+ default=None, description="Additional metadata for the entry"
163
+ )
164
+
165
+
166
+ class DirectoryManifest(BaseModel):
167
+ """Model representing the structure of a manifest file."""
168
+
169
+ entries: dict[str, "ManifestEntry | DirectoryManifest"] = Field(
170
+ default_factory=dict,
171
+ description="Map of file/directory names to their manifest entries",
172
+ )
173
+
174
+ def get_entry_description(self, name: str) -> str | None:
175
+ """Get description for a specific entry."""
176
+ entry = self.entries.get(name)
177
+ if isinstance(entry, ManifestEntry):
178
+ return entry.description
179
+ if isinstance(entry, DirectoryManifest):
180
+ # For nested directories, could derive description from contents
181
+ return None
182
+ return None
183
+
184
+ def get_entry_metadata(self, name: str) -> dict[str, Any] | None:
185
+ """Get metadata for a specific entry."""
186
+ entry = self.entries.get(name)
187
+ if isinstance(entry, ManifestEntry):
188
+ return entry.metadata
189
+ return None
190
+
191
+ @classmethod
192
+ def from_dict(cls, data: dict[str, Any]) -> "DirectoryManifest":
193
+ """Create DirectoryManifest from a dictionary (loaded from JSON/YAML)."""
194
+ entries: dict[str, ManifestEntry | DirectoryManifest] = {}
195
+ for name, value in data.items():
196
+ if isinstance(value, dict):
197
+ if "description" in value or "metadata" in value:
198
+ # This looks like a ManifestEntry
199
+ entries[name] = ManifestEntry(**value)
200
+ else:
201
+ # This looks like a nested directory
202
+ entries[name] = cls.from_dict(value)
203
+ else:
204
+ # Simple string description
205
+ entries[name] = ManifestEntry(description=str(value))
206
+
207
+ return cls(entries=entries)
208
+
209
+ def to_dict(self) -> dict[str, Any]:
210
+ """Convert back to dictionary format."""
211
+ result = {}
212
+ for name, entry in self.entries.items():
213
+ if isinstance(entry, ManifestEntry):
214
+ if entry.description is not None or entry.metadata is not None:
215
+ entry_dict = {}
216
+ if entry.description is not None:
217
+ entry_dict["description"] = entry.description
218
+ if entry.metadata is not None:
219
+ entry_dict.update(entry.metadata)
220
+ result[name] = entry_dict
221
+ elif isinstance(entry, DirectoryManifest):
222
+ result[name] = entry.to_dict()
223
+ return result
224
+
225
+
226
+ class FileMetadata(BaseModel):
227
+ """Model representing metadata for a file being processed."""
228
+
229
+ path: Path = Field(description="Path to the file")
230
+ name: str = Field(description="Name of the file")
231
+ size: int | None = Field(default=None, description="Size of the file in bytes")
232
+ description: str | None = Field(
233
+ default=None, description="Description from manifest or generated"
234
+ )
235
+ is_directory: bool = Field(default=False, description="Whether this is a directory")
236
+ parent_id: UUID | None = Field(
237
+ default=None, description="Parent directory ID in the storage system"
238
+ )
239
+ dataset_id: UUID | None = Field(
240
+ default=None, description="Dataset ID this file belongs to"
241
+ )
242
+
243
+ @classmethod
244
+ def from_path(
245
+ cls,
246
+ path: Path,
247
+ description: str | None = None,
248
+ parent_id: UUID | None = None,
249
+ dataset_id: UUID | None = None,
250
+ ) -> "FileMetadata":
251
+ """Create FileMetadata from a Path object."""
252
+ size = None
253
+ is_directory = path.is_dir()
254
+
255
+ if not is_directory:
256
+ with contextlib.suppress(OSError):
257
+ size = path.stat().st_size
258
+
259
+ return cls(
260
+ path=path,
261
+ name=path.name,
262
+ size=size,
263
+ description=description,
264
+ is_directory=is_directory,
265
+ parent_id=parent_id,
266
+ dataset_id=dataset_id,
267
+ )
268
+
269
+
270
+ class UploadProgress(BaseModel):
271
+ """Model for tracking upload progress."""
272
+
273
+ total_files: int = Field(description="Total number of files to upload")
274
+ uploaded_files: int = Field(default=0, description="Number of files uploaded")
275
+ total_bytes: int | None = Field(default=None, description="Total bytes to upload")
276
+ uploaded_bytes: int = Field(default=0, description="Number of bytes uploaded")
277
+ current_file: str | None = Field(
278
+ default=None, description="Currently uploading file"
279
+ )
280
+ errors: list[str] = Field(
281
+ default_factory=list, description="List of error messages"
282
+ )
283
+
284
+ @property
285
+ def progress_percentage(self) -> float:
286
+ """Calculate progress percentage based on files."""
287
+ if self.total_files == 0:
288
+ return 0.0
289
+ return (self.uploaded_files / self.total_files) * 100.0
290
+
291
+ @property
292
+ def bytes_percentage(self) -> float | None:
293
+ """Calculate progress percentage based on bytes."""
294
+ if not self.total_bytes or self.total_bytes == 0:
295
+ return None
296
+ return (self.uploaded_bytes / self.total_bytes) * 100.0
297
+
298
+ def add_error(self, error: str) -> None:
299
+ """Add an error message."""
300
+ self.errors.append(error)
301
+
302
+ def increment_files(self, bytes_uploaded: int = 0) -> None:
303
+ """Increment the uploaded files counter."""
304
+ self.uploaded_files += 1
305
+ self.uploaded_bytes += bytes_uploaded
306
+
307
+
308
+ class DirectoryUploadConfig(BaseModel):
309
+ """Configuration for directory uploads."""
310
+
311
+ name: str = Field(description="Name for the directory upload")
312
+ description: str | None = Field(
313
+ default=None, description="Description for the directory"
314
+ )
315
+ as_collection: bool = Field(
316
+ default=False, description="Upload as single collection or hierarchically"
317
+ )
318
+ manifest_filename: str | None = Field(
319
+ default=None, description="Name of manifest file to use"
320
+ )
321
+ ignore_patterns: list[str] = Field(
322
+ default_factory=list, description="Patterns to ignore"
323
+ )
324
+ ignore_filename: str = Field(
325
+ default=".gitignore", description="Name of ignore file to read"
326
+ )
327
+ base_path: str | None = Field(default=None, description="Base path for storage")
328
+ parent_id: UUID | None = Field(default=None, description="Parent directory ID")
329
+ dataset_id: UUID | None = Field(default=None, description="Dataset ID to use")
330
+
331
+ def with_parent(
332
+ self, parent_id: UUID, dataset_id: UUID | None = None
333
+ ) -> "DirectoryUploadConfig":
334
+ """Create a new config with parent and dataset IDs set."""
335
+ return self.model_copy(
336
+ update={"parent_id": parent_id, "dataset_id": dataset_id or self.dataset_id}
337
+ )
338
+
339
+
340
+ # Forward reference resolution for DirectoryManifest
341
+ DirectoryManifest.model_rebuild()
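The manifest and progress models in this new module are plain Pydantic models and can be exercised directly. A small sketch of the intended shapes; the import path, file names, and descriptions below are assumptions for illustration, since the diff does not show where the module lives:

```python
from pathlib import Path

# Module path assumed; adjust to wherever these models are actually exposed.
from futurehouse_client.models.data_storage_methods import (
    DirectoryManifest,
    FileMetadata,
    UploadProgress,
)

# Bare strings and dicts with "description"/"metadata" keys become ManifestEntry
# objects; other nested dicts become nested DirectoryManifest objects.
manifest = DirectoryManifest.from_dict({
    "README.md": "Top-level readme",
    "data": {
        "train.csv": {"description": "Training split", "metadata": {"rows": 1000}},
    },
})
assert manifest.get_entry_description("README.md") == "Top-level readme"

# FileMetadata.from_path stats regular files and suppresses OSError, so a
# missing file simply yields size=None rather than raising.
meta = FileMetadata.from_path(Path("README.md"), description="Top-level readme")

# UploadProgress tracks file/byte counters and derives a percentage from them.
progress = UploadProgress(total_files=2, total_bytes=meta.size)
progress.increment_files(bytes_uploaded=meta.size or 0)
assert progress.progress_percentage == 50.0
```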
@@ -23,6 +23,15 @@ class StoreEnvironmentFrameRequest(BaseModel):
23
23
  trajectory_timestep: int
24
24
 
25
25
 
26
+ class TrajectoryPatchRequest(BaseModel):
27
+ public: bool | None = None
28
+ shared_with: list[int] | None = None
29
+ notification_enabled: bool | None = None
30
+ notification_type: str | None = None
31
+ min_estimated_time: float | None = None
32
+ max_estimated_time: float | None = None
33
+
34
+
26
35
  class ExecutionStatus(StrEnum):
27
36
  QUEUED = auto()
28
37
  IN_PROGRESS = "in progress"
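Every field on the `TrajectoryPatchRequest` model added above defaults to `None`, so a partial update serializes to exactly the fields the caller set:

```python
from futurehouse_client.models.rest import TrajectoryPatchRequest

req = TrajectoryPatchRequest(public=True, min_estimated_time=30.0)

# Unset fields stay None and are dropped, so the PATCH body carries only the
# values that should actually change on the trajectory.
assert req.model_dump(mode="json", exclude_none=True) == {
    "public": True,
    "min_estimated_time": 30.0,
}
```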
@@ -2,11 +2,45 @@ import asyncio
2
2
  from collections.abc import Awaitable, Iterable
3
3
  from typing import TypeVar
4
4
 
5
+ from httpx import (
6
+ CloseError,
7
+ ConnectError,
8
+ ConnectTimeout,
9
+ NetworkError,
10
+ ReadError,
11
+ ReadTimeout,
12
+ RemoteProtocolError,
13
+ )
14
+ from requests.exceptions import RequestException, Timeout
15
+ from tenacity import retry_if_exception_type
5
16
  from tqdm.asyncio import tqdm
6
17
 
7
18
  T = TypeVar("T")
8
19
 
9
20
 
21
+ _BASE_CONNECTION_ERRORS = (
22
+ # From requests
23
+ Timeout,
24
+ ConnectionError,
25
+ RequestException,
26
+ # From httpx
27
+ ConnectError,
28
+ ConnectTimeout,
29
+ ReadTimeout,
30
+ ReadError,
31
+ NetworkError,
32
+ RemoteProtocolError,
33
+ CloseError,
34
+ )
35
+
36
+ retry_if_connection_error = retry_if_exception_type(_BASE_CONNECTION_ERRORS)
37
+
38
+
39
+ def create_retry_if_connection_error(*additional_exceptions):
40
+ """Create a retry condition with base connection errors plus additional exceptions."""
41
+ return retry_if_exception_type(_BASE_CONNECTION_ERRORS + additional_exceptions)
42
+
43
+
10
44
  async def gather_with_concurrency(
11
45
  n: int | asyncio.Semaphore, coros: Iterable[Awaitable[T]], progress: bool = False
12
46
  ) -> list[T]:
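`create_retry_if_connection_error` keeps the list of connection-type exceptions in one place; callers that also need to retry on their own error types extend it instead of re-listing the httpx and requests exceptions. A hedged sketch of pairing it with tenacity (the decorated function and the extra error type are illustrative, mirroring how `RestClient` builds its condition):

```python
from tenacity import retry, stop_after_attempt, wait_exponential

from futurehouse_client.utils.general import create_retry_if_connection_error


class FileUploadError(Exception):
    """Illustrative stand-in for a caller-specific error type."""


# Base requests/httpx connection errors plus the caller-specific exception.
retry_if_connection_error = create_retry_if_connection_error(FileUploadError)


@retry(
    retry=retry_if_connection_error,
    stop=stop_after_attempt(3),
    wait=wait_exponential(multiplier=1, max=10),
)
def upload_chunk() -> None:
    ...  # hypothetical network call that may raise one of the errors above
```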
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
28
28
  commit_id: COMMIT_ID
29
29
  __commit_id__: COMMIT_ID
30
30
 
31
- __version__ = version = '0.4.2.dev11'
32
- __version_tuple__ = version_tuple = (0, 4, 2, 'dev11')
31
+ __version__ = version = '0.4.2.dev274'
32
+ __version_tuple__ = version_tuple = (0, 4, 2, 'dev274')
33
33
 
34
34
  __commit_id__ = commit_id = None
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: futurehouse-client
3
- Version: 0.4.2.dev11
3
+ Version: 0.4.2.dev274
4
4
  Summary: A client for interacting with endpoints of the FutureHouse service.
5
5
  Author-email: FutureHouse technical staff <hello@futurehouse.org>
6
6
  License: Apache License
@@ -213,18 +213,22 @@ Classifier: Programming Language :: Python
213
213
  Requires-Python: <3.14,>=3.11
214
214
  Description-Content-Type: text/markdown
215
215
  License-File: LICENSE
216
+ Requires-Dist: aiofiles
216
217
  Requires-Dist: cloudpickle
217
218
  Requires-Dist: fhaviary
219
+ Requires-Dist: google-resumable-media[aiohttp]
218
220
  Requires-Dist: httpx
219
221
  Requires-Dist: ldp>=0.22.0
220
222
  Requires-Dist: litellm
221
223
  Requires-Dist: pydantic
222
224
  Requires-Dist: python-dotenv
225
+ Requires-Dist: requests
223
226
  Requires-Dist: tenacity
224
227
  Requires-Dist: tqdm>=4.62
225
228
  Provides-Extra: dev
226
229
  Requires-Dist: black; extra == "dev"
227
230
  Requires-Dist: futurehouse-client[monitoring,typing]; extra == "dev"
231
+ Requires-Dist: ipykernel; extra == "dev"
228
232
  Requires-Dist: jupyter; extra == "dev"
229
233
  Requires-Dist: jupyterlab; extra == "dev"
230
234
  Requires-Dist: mypy; extra == "dev"
@@ -244,6 +248,7 @@ Requires-Dist: setuptools_scm; extra == "dev"
244
248
  Provides-Extra: monitoring
245
249
  Requires-Dist: newrelic>=8.8.0; extra == "monitoring"
246
250
  Provides-Extra: typing
251
+ Requires-Dist: types-PyYAML; extra == "typing"
247
252
  Requires-Dist: types-requests; extra == "typing"
248
253
  Requires-Dist: types-tqdm; extra == "typing"
249
254
  Dynamic: license-file