futurehouse-client 0.4.2.dev11__py3-none-any.whl → 0.4.3.dev3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,355 @@
+ import contextlib
+ from datetime import datetime
+ from enum import StrEnum, auto
+ from os import PathLike
+ from pathlib import Path
+ from typing import Any
+ from uuid import UUID
+
+ from pydantic import BaseModel, Field, JsonValue
+
+
+ class DataStorageEntry(BaseModel):
+     """Model representing a data storage entry."""
+
+     id: UUID = Field(description="Unique identifier for the data storage entry")
+     name: str = Field(description="Name of the data storage entry")
+     description: str | None = Field(
+         default=None, description="Description of the data storage entry"
+     )
+     content: str | None = Field(
+         default=None, description="Content of the data storage entry"
+     )
+     embedding: list[float] | None = Field(
+         default=None, description="Embedding vector for the content"
+     )
+     is_collection: bool = Field(
+         default=False, description="Whether this entry is a collection"
+     )
+     tags: list[str] | None = Field(
+         default=None,
+         description="List of tags associated with the data storage entry",
+     )
+     parent_id: UUID | None = Field(
+         default=None,
+         description="ID of the parent entry if this is a sub-entry for hierarchical storage",
+     )
+     project_id: UUID | None = Field(
+         default=None,
+         description="ID of the project this data storage entry belongs to",
+     )
+     dataset_id: UUID | None = Field(
+         default=None,
+         description="ID of the dataset this entry belongs to",
+     )
+     path: str | None = Field(
+         default=None,
+         description="Path in the storage system where this entry is located, if a file.",
+     )
+     bigquery_schema: Any | None = Field(
+         default=None, description="Target BigQuery schema for the data storage entry"
+     )
+     user_id: str = Field(description="ID of the user who created this entry")
+     created_at: datetime = Field(description="Timestamp when the entry was created")
+     modified_at: datetime = Field(
+         description="Timestamp when the entry was last updated"
+     )
+
+
+ class DataStorageType(StrEnum):
+     BIGQUERY = auto()
+     GCS = auto()
+     LINK = auto()
+     PG_TABLE = auto()
+     RAW_CONTENT = auto()
+     ELASTIC_SEARCH = auto()
+
+
+ class DataContentType(StrEnum):
+     BQ_DATASET = auto()
+     BQ_TABLE = auto()
+     TEXT = auto()
+     TEXT_W_EMBEDDINGS = auto()
+     DIRECTORY = auto()
+     FILE = auto()
+     INDEX = auto()
+     INDEX_W_EMBEDDINGS = auto()
+
+
+ class DataStorageLocationPayload(BaseModel):
+     storage_type: DataStorageType
+     content_type: DataContentType
+     content_schema: JsonValue | None = None
+     metadata: JsonValue | None = None
+     location: str | None = None
+
+
+ class DataStorageLocationConfig(BaseModel):
+     """Model representing the location configuration within a DataStorageLocations object."""
+
+     storage_type: str = Field(description="Type of storage (e.g., 'gcs', 'pg_table')")
+     content_type: str = Field(description="Type of content stored")
+     content_schema: JsonValue | None = Field(default=None, description="Content schema")
+     metadata: JsonValue | None = Field(default=None, description="Location metadata")
+     location: str | None = Field(
+         default=None, description="Location path or identifier"
+     )
+     signed_url: str | None = Field(
+         default=None,
+         description="Signed URL for uploading/downloading the file to/from GCS",
+     )
+
+
+ class DataStorageLocation(BaseModel):
+     """Model representing storage locations for a data storage entry."""
+
+     id: UUID = Field(description="Unique identifier for the storage locations")
+     data_storage_id: UUID = Field(description="ID of the associated data storage entry")
+     storage_config: DataStorageLocationConfig = Field(
+         description="Storage location configuration"
+     )
+     created_at: datetime = Field(description="Timestamp when the location was created")
+
+
+ class DataStorageResponse(BaseModel):
+     """Response model for data storage operations."""
+
+     data_storage: DataStorageEntry = Field(description="The created data storage entry")
+     storage_locations: list[DataStorageLocation] = Field(
+         description="Storage location for this data entry"
+     )
+
+
+ class DataStorageRequestPayload(BaseModel):
+     """Payload for creating a data storage entry."""
+
+     name: str = Field(description="Name of the data storage entry")
+     description: str | None = Field(
+         default=None, description="Description of the data storage entry"
+     )
+     content: str | None = Field(
+         default=None, description="Content of the data storage entry"
+     )
+     is_collection: bool = Field(
+         default=False, description="Whether this entry is a collection"
+     )
+     parent_id: UUID | None = Field(
+         default=None, description="ID of the parent entry for hierarchical storage"
+     )
+     project_id: UUID | None = Field(
+         default=None,
+         description="ID of the project this data storage entry belongs to",
+     )
+     dataset_id: UUID | None = Field(
+         default=None,
+         description="ID of existing dataset to add entry to, or None to create new dataset",
+     )
+     path: PathLike | str | None = Field(
+         default=None,
+         description="Path to store in the GCS bucket.",
+     )
+     existing_location: DataStorageLocationPayload | None = Field(
+         default=None, description="Target storage metadata"
+     )
+
+
+ class CreateDatasetPayload(BaseModel):
+     """Payload for creating a dataset."""
+
+     id: UUID | None = Field(
+         default=None,
+         description="ID of the dataset to create, or None to create a new dataset",
+     )
+     name: str = Field(description="Name of the dataset")
+     description: str | None = Field(
+         default=None, description="Description of the dataset"
+     )
+
+
+ class ManifestEntry(BaseModel):
+     """Model representing a single entry in a manifest file."""
+
+     description: str | None = Field(
+         default=None, description="Description of the file or directory"
+     )
+     metadata: dict[str, Any] | None = Field(
+         default=None, description="Additional metadata for the entry"
+     )
+
+
+ class DirectoryManifest(BaseModel):
+     """Model representing the structure of a manifest file."""
+
+     entries: dict[str, "ManifestEntry | DirectoryManifest"] = Field(
+         default_factory=dict,
+         description="Map of file/directory names to their manifest entries",
+     )
+
+     def get_entry_description(self, name: str) -> str | None:
+         """Get description for a specific entry."""
+         entry = self.entries.get(name)
+         if isinstance(entry, ManifestEntry):
+             return entry.description
+         if isinstance(entry, DirectoryManifest):
+             # For nested directories, could derive description from contents
+             return None
+         return None
+
+     def get_entry_metadata(self, name: str) -> dict[str, Any] | None:
+         """Get metadata for a specific entry."""
+         entry = self.entries.get(name)
+         if isinstance(entry, ManifestEntry):
+             return entry.metadata
+         return None
+
+     @classmethod
+     def from_dict(cls, data: dict[str, Any]) -> "DirectoryManifest":
+         """Create DirectoryManifest from a dictionary (loaded from JSON/YAML)."""
+         entries: dict[str, ManifestEntry | DirectoryManifest] = {}
+         for name, value in data.items():
+             if isinstance(value, dict):
+                 if "description" in value or "metadata" in value:
+                     # This looks like a ManifestEntry
+                     entries[name] = ManifestEntry(**value)
+                 else:
+                     # This looks like a nested directory
+                     entries[name] = cls.from_dict(value)
+             else:
+                 # Simple string description
+                 entries[name] = ManifestEntry(description=str(value))
+
+         return cls(entries=entries)
+
+     def to_dict(self) -> dict[str, Any]:
+         """Convert back to dictionary format."""
+         result = {}
+         for name, entry in self.entries.items():
+             if isinstance(entry, ManifestEntry):
+                 if entry.description is not None or entry.metadata is not None:
+                     entry_dict = {}
+                     if entry.description is not None:
+                         entry_dict["description"] = entry.description
+                     if entry.metadata is not None:
+                         entry_dict.update(entry.metadata)
+                     result[name] = entry_dict
+             elif isinstance(entry, DirectoryManifest):
+                 result[name] = entry.to_dict()
+         return result
+
+
+ class FileMetadata(BaseModel):
+     """Model representing metadata for a file being processed."""
+
+     path: Path = Field(description="Path to the file")
+     name: str = Field(description="Name of the file")
+     size: int | None = Field(default=None, description="Size of the file in bytes")
+     description: str | None = Field(
+         default=None, description="Description from manifest or generated"
+     )
+     is_directory: bool = Field(default=False, description="Whether this is a directory")
+     parent_id: UUID | None = Field(
+         default=None, description="Parent directory ID in the storage system"
+     )
+     dataset_id: UUID | None = Field(
+         default=None, description="Dataset ID this file belongs to"
+     )
+
+     @classmethod
+     def from_path(
+         cls,
+         path: Path,
+         description: str | None = None,
+         parent_id: UUID | None = None,
+         dataset_id: UUID | None = None,
+     ) -> "FileMetadata":
+         """Create FileMetadata from a Path object."""
+         size = None
+         is_directory = path.is_dir()
+
+         if not is_directory:
+             with contextlib.suppress(OSError):
+                 size = path.stat().st_size
+
+         return cls(
+             path=path,
+             name=path.name,
+             size=size,
+             description=description,
+             is_directory=is_directory,
+             parent_id=parent_id,
+             dataset_id=dataset_id,
+         )
+
+
+ class UploadProgress(BaseModel):
+     """Model for tracking upload progress."""
+
+     total_files: int = Field(description="Total number of files to upload")
+     uploaded_files: int = Field(default=0, description="Number of files uploaded")
+     total_bytes: int | None = Field(default=None, description="Total bytes to upload")
+     uploaded_bytes: int = Field(default=0, description="Number of bytes uploaded")
+     current_file: str | None = Field(
+         default=None, description="Currently uploading file"
+     )
+     errors: list[str] = Field(
+         default_factory=list, description="List of error messages"
+     )
+
+     @property
+     def progress_percentage(self) -> float:
+         """Calculate progress percentage based on files."""
+         if self.total_files == 0:
+             return 0.0
+         return (self.uploaded_files / self.total_files) * 100.0
+
+     @property
+     def bytes_percentage(self) -> float | None:
+         """Calculate progress percentage based on bytes."""
+         if not self.total_bytes or self.total_bytes == 0:
+             return None
+         return (self.uploaded_bytes / self.total_bytes) * 100.0
+
+     def add_error(self, error: str) -> None:
+         """Add an error message."""
+         self.errors.append(error)
+
+     def increment_files(self, bytes_uploaded: int = 0) -> None:
+         """Increment the uploaded files counter."""
+         self.uploaded_files += 1
+         self.uploaded_bytes += bytes_uploaded
+
+
+ class DirectoryUploadConfig(BaseModel):
+     """Configuration for directory uploads."""
+
+     name: str = Field(description="Name for the directory upload")
+     description: str | None = Field(
+         default=None, description="Description for the directory"
+     )
+     as_collection: bool = Field(
+         default=False, description="Upload as single collection or hierarchically"
+     )
+     manifest_filename: str | None = Field(
+         default=None, description="Name of manifest file to use"
+     )
+     ignore_patterns: list[str] = Field(
+         default_factory=list, description="Patterns to ignore"
+     )
+     ignore_filename: str = Field(
+         default=".gitignore", description="Name of ignore file to read"
+     )
+     base_path: str | None = Field(default=None, description="Base path for storage")
+     parent_id: UUID | None = Field(default=None, description="Parent directory ID")
+     dataset_id: UUID | None = Field(default=None, description="Dataset ID to use")
+
+     def with_parent(
+         self, parent_id: UUID, dataset_id: UUID | None = None
+     ) -> "DirectoryUploadConfig":
+         """Create a new config with parent and dataset IDs set."""
+         return self.model_copy(
+             update={"parent_id": parent_id, "dataset_id": dataset_id or self.dataset_id}
+         )
+
+
+ # Forward reference resolution for DirectoryManifest
+ DirectoryManifest.model_rebuild()
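
The models above compose directly: `DirectoryManifest.from_dict` parses a plain dict (e.g. loaded from JSON or YAML), treating bare strings as descriptions and dicts without "description"/"metadata" keys as nested directories, while `UploadProgress` tracks per-file and per-byte progress. A minimal sketch using only names defined in this hunk (the manifest contents are illustrative):

    # Illustrative data only; the model names come from the hunk above.
    manifest = DirectoryManifest.from_dict({
        "data": {  # no "description"/"metadata" key -> parsed as a nested directory
            "train.csv": "Training split",  # bare string -> ManifestEntry description
        },
        "README.md": {"description": "Project overview", "metadata": {"lang": "en"}},
    })
    assert manifest.get_entry_description("README.md") == "Project overview"

    progress = UploadProgress(total_files=2)
    progress.increment_files(bytes_uploaded=1024)  # one of two files done
    print(f"{progress.progress_percentage:.0f}%")  # -> 50%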
@@ -23,6 +23,15 @@ class StoreEnvironmentFrameRequest(BaseModel):
      trajectory_timestep: int
 
 
+ class TrajectoryPatchRequest(BaseModel):
+     public: bool | None = None
+     shared_with: list[int] | None = None
+     notification_enabled: bool | None = None
+     notification_type: str | None = None
+     min_estimated_time: float | None = None
+     max_estimated_time: float | None = None
+
+
  class ExecutionStatus(StrEnum):
      QUEUED = auto()
      IN_PROGRESS = "in progress"
@@ -54,7 +63,37 @@ class WorldModel(BaseModel):
      project_id: UUID | str | None = None
 
 
- class WorldModelResponse(BaseModel):
+ class SearchOperator(StrEnum):
+     """Operators for structured search criteria."""
+
+     EQUALS = "equals"
+     CONTAINS = "contains"
+     STARTS_WITH = "starts_with"
+     ENDS_WITH = "ends_with"
+     GREATER_THAN = "greater_than"
+     LESS_THAN = "less_than"
+     BETWEEN = "between"
+     IN = "in"
+
+
+ class SearchCriterion(BaseModel):
+     """A single search criterion with field, operator, and value."""
+
+     field: str
+     operator: SearchOperator
+     value: str | list[str] | bool
+
+
+ class WorldModelSearchPayload(BaseModel):
+     """Payload for structured world model search."""
+
+     criteria: list[SearchCriterion]
+     size: int = 10
+     project_id: UUID | str | None = None
+     search_all_versions: bool = False
+
+
+ class WorldModelResponse(WorldModel):
      """
      Response model for a world model snapshot.
 
@@ -62,13 +101,8 @@ class WorldModelResponse(BaseModel):
      """
 
      id: UUID | str
-     prior: UUID | str | None
-     name: str
-     description: str | None
-     content: str
-     trajectory_id: UUID | str | None
+     name: str  # type: ignore[mutable-override] # The API always returns a non-optional name, overriding the base model's optional field.
      email: str | None
-     model_metadata: JsonValue | None
      enabled: bool
      created_at: datetime
 
@@ -132,3 +166,10 @@ class DiscoveryResponse(BaseModel):
      associated_trajectories: list[UUID | str]
      validation_level: int
      created_at: datetime
+
+
+ class DataStorageSearchPayload(BaseModel):
+     """Payload for structured data storage search."""
+
+     criteria: list[SearchCriterion]
+     size: int = 10
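
`SearchCriterion` plus the two payload models above define one structured query shape shared by the world-model and data-storage search endpoints. A minimal sketch of constructing a payload, assuming nothing beyond the models in this hunk (field names and values are illustrative):

    # Field names/values are illustrative; the models come from the hunk above.
    payload = WorldModelSearchPayload(
        criteria=[
            SearchCriterion(
                field="name", operator=SearchOperator.CONTAINS, value="protein"
            ),
            SearchCriterion(
                field="enabled", operator=SearchOperator.EQUALS, value=True
            ),
        ],
        size=5,
    )
    print(payload.model_dump_json())  # serialized form suitable as a request body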
@@ -1,11 +1,74 @@
  import asyncio
- from collections.abc import Awaitable, Iterable
+ from collections.abc import Awaitable, Callable, Iterable
  from typing import TypeVar
 
+ from httpx import (
+     CloseError,
+     ConnectError,
+     ConnectTimeout,
+     HTTPStatusError,
+     NetworkError,
+     ReadError,
+     ReadTimeout,
+     RemoteProtocolError,
+     codes,
+ )
+ from requests.exceptions import RequestException, Timeout
+ from tenacity import RetryCallState
  from tqdm.asyncio import tqdm
 
  T = TypeVar("T")
 
+ RETRYABLE_HTTP_STATUS_CODES = {
+     codes.TOO_MANY_REQUESTS,
+     codes.INTERNAL_SERVER_ERROR,
+     codes.BAD_GATEWAY,
+     codes.SERVICE_UNAVAILABLE,
+     codes.GATEWAY_TIMEOUT,
+ }
+
+ _BASE_CONNECTION_ERRORS = (
+     # From requests
+     Timeout,
+     ConnectionError,
+     RequestException,
+     # From httpx
+     ConnectError,
+     ConnectTimeout,
+     ReadTimeout,
+     ReadError,
+     NetworkError,
+     RemoteProtocolError,
+     CloseError,
+ )
+
+
+ def create_retry_if_connection_error(
+     *additional_exceptions,
+ ) -> Callable[[RetryCallState], bool]:
+     """Create a retry condition with base connection errors, HTTP status errors, plus additional exceptions."""
+
+     def status_retries_with_exceptions(retry_state: RetryCallState) -> bool:
+         if retry_state.outcome is not None and hasattr(
+             retry_state.outcome, "exception"
+         ):
+             exception = retry_state.outcome.exception()
+             # connection errors
+             if isinstance(exception, _BASE_CONNECTION_ERRORS):
+                 return True
+             # custom exceptions provided
+             if additional_exceptions and isinstance(exception, additional_exceptions):
+                 return True
+             # any http exceptions
+             if isinstance(exception, HTTPStatusError):
+                 return exception.response.status_code in RETRYABLE_HTTP_STATUS_CODES
+         return False
+
+     return status_retries_with_exceptions
+
+
+ retry_if_connection_error = create_retry_if_connection_error()
+
 
 
  async def gather_with_concurrency(
      n: int | asyncio.Semaphore, coros: Iterable[Awaitable[T]], progress: bool = False
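
`create_retry_if_connection_error` returns a plain `(RetryCallState) -> bool` predicate, so it plugs directly into tenacity's `retry=` argument; an `HTTPStatusError` is retried only when its status code is in `RETRYABLE_HTTP_STATUS_CODES`. A sketch of wiring it up (the `fetch` function and the extra `ValueError` are hypothetical, not part of this package):

    import httpx
    from tenacity import retry, stop_after_attempt, wait_exponential

    @retry(
        retry=create_retry_if_connection_error(ValueError),  # ValueError added as an extra retryable
        stop=stop_after_attempt(3),
        wait=wait_exponential(multiplier=0.5),
    )
    def fetch(url: str) -> str:  # hypothetical caller
        response = httpx.get(url)
        response.raise_for_status()  # raises HTTPStatusError; retried only for 429/5xx codes above
        return response.text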
@@ -5,7 +5,7 @@ from aviary.core import Tool
 
  from futurehouse_client.clients.rest_client import RestClient
  from futurehouse_client.models.app import Stage
- from futurehouse_client.models.rest import WorldModel
+ from futurehouse_client.models.rest import SearchCriterion, SearchOperator, WorldModel
 
 
  class WorldModelTools:
@@ -56,7 +56,26 @@ class WorldModelTools:
          Returns:
              list[str]: A list of world model IDs that match the search query.
          """
-         return WorldModelTools._get_client().search_world_models(query, size=size)
+         criteria = (
+             [
+                 SearchCriterion(
+                     field="name", operator=SearchOperator.CONTAINS, value=query
+                 ),
+                 SearchCriterion(
+                     field="description", operator=SearchOperator.CONTAINS, value=query
+                 ),
+                 SearchCriterion(
+                     field="content", operator=SearchOperator.CONTAINS, value=query
+                 ),
+             ]
+             if query
+             else []
+         )
+
+         results = WorldModelTools._get_client().search_world_models(
+             criteria=criteria, size=size
+         )
+         return [str(model.id) for model in results]
 
 
  create_world_model_tool = Tool.from_function(WorldModelTools.create_world_model)
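
The rewritten tool body illustrates the new calling convention: build a list of `SearchCriterion`, pass it as `criteria=` to `search_world_models`, and map the returned models to string IDs. The same search can be issued against the client directly; a sketch mirroring the tool above (the query string is illustrative):

    # Mirrors the tool body above; "kinase" is an illustrative query.
    client = WorldModelTools._get_client()
    results = client.search_world_models(
        criteria=[
            SearchCriterion(
                field="content", operator=SearchOperator.CONTAINS, value="kinase"
            )
        ],
        size=3,
    )
    ids = [str(model.id) for model in results]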
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
  commit_id: COMMIT_ID
  __commit_id__: COMMIT_ID
 
- __version__ = version = '0.4.2.dev11'
- __version_tuple__ = version_tuple = (0, 4, 2, 'dev11')
+ __version__ = version = '0.4.3.dev3'
+ __version_tuple__ = version_tuple = (0, 4, 3, 'dev3')
 
  __commit_id__ = commit_id = None
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: futurehouse-client
- Version: 0.4.2.dev11
+ Version: 0.4.3.dev3
  Summary: A client for interacting with endpoints of the FutureHouse service.
  Author-email: FutureHouse technical staff <hello@futurehouse.org>
  License: Apache License
@@ -213,18 +213,23 @@ Classifier: Programming Language :: Python
  Requires-Python: <3.14,>=3.11
  Description-Content-Type: text/markdown
  License-File: LICENSE
+ Requires-Dist: aiofiles
  Requires-Dist: cloudpickle
  Requires-Dist: fhaviary
+ Requires-Dist: google-resumable-media[aiohttp]
  Requires-Dist: httpx
  Requires-Dist: ldp>=0.22.0
  Requires-Dist: litellm
+ Requires-Dist: openai<1.100.0,>=1
  Requires-Dist: pydantic
  Requires-Dist: python-dotenv
+ Requires-Dist: requests
  Requires-Dist: tenacity
  Requires-Dist: tqdm>=4.62
  Provides-Extra: dev
  Requires-Dist: black; extra == "dev"
  Requires-Dist: futurehouse-client[monitoring,typing]; extra == "dev"
+ Requires-Dist: ipykernel; extra == "dev"
  Requires-Dist: jupyter; extra == "dev"
  Requires-Dist: jupyterlab; extra == "dev"
  Requires-Dist: mypy; extra == "dev"
@@ -244,6 +249,8 @@ Requires-Dist: setuptools_scm; extra == "dev"
  Provides-Extra: monitoring
  Requires-Dist: newrelic>=8.8.0; extra == "monitoring"
  Provides-Extra: typing
+ Requires-Dist: types-PyYAML; extra == "typing"
+ Requires-Dist: types-aiofiles; extra == "typing"
  Requires-Dist: types-requests; extra == "typing"
  Requires-Dist: types-tqdm; extra == "typing"
  Dynamic: license-file
@@ -0,0 +1,23 @@
+ futurehouse_client/__init__.py,sha256=PvFTkocA-hobsWoDEBEdrUgLIbuVbDs_0nvMdImJmHk,707
+ futurehouse_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ futurehouse_client/version.py,sha256=_c6r7-3ldIKHE25bQ2mtbV5_BGB4bVAIxsyhTiprV-g,717
+ futurehouse_client/clients/__init__.py,sha256=-HXNj-XJ3LRO5XM6MZ709iPs29YpApss0Q2YYg1qMZw,280
+ futurehouse_client/clients/data_storage_methods.py,sha256=NBbJIUF3vWc7bRYhapELegpnAlB4R38mZsSDs7Y2-Tc,97165
+ futurehouse_client/clients/job_client.py,sha256=b5gpzulZpxpv9R337r3UKItnMdtd6CGlI1sV3_VQJso,13985
+ futurehouse_client/clients/rest_client.py,sha256=Ng36P8obNW1WTRYWTqLQ0ka5tefT-r723_Kr0haT9aM,103225
+ futurehouse_client/models/__init__.py,sha256=0YlzKGymbY1g4cXxnUc0BUnthTkVBf12bCZlGUcMQqk,701
+ futurehouse_client/models/app.py,sha256=UUg17I3zk6cH_7mrdojHGYvQfm_SeDkuUxsPlRyIYz0,31895
+ futurehouse_client/models/client.py,sha256=n4HD0KStKLm6Ek9nL9ylP-bkK10yzAaD1uIDF83Qp_A,1828
+ futurehouse_client/models/data_storage_methods.py,sha256=GS1FbuMsUJSh7Evjt86vOri-95hfiLyASBS1xG7erNk,12793
+ futurehouse_client/models/rest.py,sha256=Fqw0_ypULzd7IV93PKooSG9W5_g7fGFsdW9jNVVImHA,4514
+ futurehouse_client/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ futurehouse_client/utils/auth.py,sha256=tgWELjKfg8eWme_qdcRmc8TjQN9DVZuHHaVXZNHLchk,2960
+ futurehouse_client/utils/general.py,sha256=PIkGLCSA3kUvc6mwR-prEB7YnMdKILOIm6cPowSZzzs,2532
+ futurehouse_client/utils/module_utils.py,sha256=aFyd-X-pDARXz9GWpn8SSViUVYdSbuy9vSkrzcVIaGI,4955
+ futurehouse_client/utils/monitoring.py,sha256=UjRlufe67kI3VxRHOd5fLtJmlCbVA2Wqwpd4uZhXkQM,8728
+ futurehouse_client/utils/world_model_tools.py,sha256=v2krZGrco0ur2a_pcRMtnQL05SxlIoBXuJ5R1JkQNws,2921
+ futurehouse_client-0.4.3.dev3.dist-info/licenses/LICENSE,sha256=oQ9ZHjUi-_6GfP3gs14FlPb0OlGwE1QCCKFGnJ4LD2I,11341
+ futurehouse_client-0.4.3.dev3.dist-info/METADATA,sha256=0GVESlronhQMiFUHqhgZcRw9dkmIGPdkJA7m8NY_u7E,27068
+ futurehouse_client-0.4.3.dev3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ futurehouse_client-0.4.3.dev3.dist-info/top_level.txt,sha256=TRuLUCt_qBnggdFHCX4O_BoCu1j2X43lKfIZC-ElwWY,19
+ futurehouse_client-0.4.3.dev3.dist-info/RECORD,,
@@ -1,21 +0,0 @@
1
- futurehouse_client/__init__.py,sha256=PvFTkocA-hobsWoDEBEdrUgLIbuVbDs_0nvMdImJmHk,707
2
- futurehouse_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
3
- futurehouse_client/version.py,sha256=c_JzNS0ouORBU9NbAiOBOJBqK2VWGvvGLFYyQtq8WEs,719
4
- futurehouse_client/clients/__init__.py,sha256=-HXNj-XJ3LRO5XM6MZ709iPs29YpApss0Q2YYg1qMZw,280
5
- futurehouse_client/clients/job_client.py,sha256=D51_qTxya6g5Wfg_ZfJdP031TV_YDJeXkGMiYAJ1qRc,11962
6
- futurehouse_client/clients/rest_client.py,sha256=OsdskJ1OT8SdDJQHG4bbt2_sVZMpHMtbG_x78cdV8ac,99790
7
- futurehouse_client/models/__init__.py,sha256=kQ4R7VEuRxO0IQEW_sk9CndBL7zzl8rUKI24ddyYLM0,647
8
- futurehouse_client/models/app.py,sha256=UUg17I3zk6cH_7mrdojHGYvQfm_SeDkuUxsPlRyIYz0,31895
9
- futurehouse_client/models/client.py,sha256=WFD1ddR0O7nD1ErqcJ-kt_miIW22KP6IDOSkaSdVZ8M,1716
10
- futurehouse_client/models/rest.py,sha256=ybelLsyTsKYud7DYUCF0sFF6u81bl8WmS_wWAnbX-0M,3382
11
- futurehouse_client/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
12
- futurehouse_client/utils/auth.py,sha256=tgWELjKfg8eWme_qdcRmc8TjQN9DVZuHHaVXZNHLchk,2960
13
- futurehouse_client/utils/general.py,sha256=A_rtTiYW30ELGEZlWCIArO7q1nEmqi8hUlmBRYkMQ_c,767
14
- futurehouse_client/utils/module_utils.py,sha256=aFyd-X-pDARXz9GWpn8SSViUVYdSbuy9vSkrzcVIaGI,4955
15
- futurehouse_client/utils/monitoring.py,sha256=UjRlufe67kI3VxRHOd5fLtJmlCbVA2Wqwpd4uZhXkQM,8728
16
- futurehouse_client/utils/world_model_tools.py,sha256=Ctiy-EfK7EXrjmKO_nI6V5VhOJyHKWc0sKwa8Q0HAAo,2292
17
- futurehouse_client-0.4.2.dev11.dist-info/licenses/LICENSE,sha256=oQ9ZHjUi-_6GfP3gs14FlPb0OlGwE1QCCKFGnJ4LD2I,11341
18
- futurehouse_client-0.4.2.dev11.dist-info/METADATA,sha256=xb_dj1oOZU-vjhdpMUnRrlLzbqfkaJCA-ZNa4PUE2LQ,26803
19
- futurehouse_client-0.4.2.dev11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
20
- futurehouse_client-0.4.2.dev11.dist-info/top_level.txt,sha256=TRuLUCt_qBnggdFHCX4O_BoCu1j2X43lKfIZC-ElwWY,19
21
- futurehouse_client-0.4.2.dev11.dist-info/RECORD,,