futurehouse-client 0.4.1__py3-none-any.whl → 0.4.2__py3-none-any.whl

This diff compares the contents of two publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only.
futurehouse_client/models/data_storage_methods.py (new file)
@@ -0,0 +1,354 @@
+import contextlib
+from datetime import datetime
+from enum import StrEnum, auto
+from os import PathLike
+from pathlib import Path
+from typing import Any
+from uuid import UUID
+
+from pydantic import BaseModel, Field, JsonValue
+
+
+class DataStorageEntry(BaseModel):
+    """Model representing a data storage entry."""
+
+    id: UUID = Field(description="Unique identifier for the data storage entry")
+    name: str = Field(description="Name of the data storage entry")
+    description: str | None = Field(
+        default=None, description="Description of the data storage entry"
+    )
+    content: str | None = Field(
+        default=None, description="Content of the data storage entry"
+    )
+    embedding: list[float] | None = Field(
+        default=None, description="Embedding vector for the content"
+    )
+    is_collection: bool = Field(
+        default=False, description="Whether this entry is a collection"
+    )
+    tags: list[str] | None = Field(
+        default=None,
+        description="List of tags associated with the data storage entry",
+    )
+    parent_id: UUID | None = Field(
+        default=None,
+        description="ID of the parent entry if this is a sub-entry for hierarchical storage",
+    )
+    project_id: UUID | None = Field(
+        default=None,
+        description="ID of the project this data storage entry belongs to",
+    )
+    dataset_id: UUID | None = Field(
+        default=None,
+        description="ID of the dataset this entry belongs to",
+    )
+    path: str | None = Field(
+        default=None,
+        description="Path in the storage system where this entry is located, if a file.",
+    )
+    bigquery_schema: Any | None = Field(
+        default=None, description="Target BigQuery schema for the data storage entry"
+    )
+    user_id: str = Field(description="ID of the user who created this entry")
+    created_at: datetime = Field(description="Timestamp when the entry was created")
+    modified_at: datetime = Field(
+        description="Timestamp when the entry was last updated"
+    )
+
+
+class DataStorageType(StrEnum):
+    BIGQUERY = auto()
+    GCS = auto()
+    PG_TABLE = auto()
+    RAW_CONTENT = auto()
+    ELASTIC_SEARCH = auto()
+
+
+class DataContentType(StrEnum):
+    BQ_DATASET = auto()
+    BQ_TABLE = auto()
+    TEXT = auto()
+    TEXT_W_EMBEDDINGS = auto()
+    DIRECTORY = auto()
+    FILE = auto()
+    INDEX = auto()
+    INDEX_W_EMBEDDINGS = auto()
+
+
+class DataStorageLocationPayload(BaseModel):
+    storage_type: DataStorageType
+    content_type: DataContentType
+    content_schema: JsonValue | None = None
+    metadata: JsonValue | None = None
+    location: str | None = None
+
+
+class DataStorageLocationConfig(BaseModel):
+    """Model representing the location configuration within a DataStorageLocations object."""
+
+    storage_type: str = Field(description="Type of storage (e.g., 'gcs', 'pg_table')")
+    content_type: str = Field(description="Type of content stored")
+    content_schema: JsonValue | None = Field(default=None, description="Content schema")
+    metadata: JsonValue | None = Field(default=None, description="Location metadata")
+    location: str | None = Field(
+        default=None, description="Location path or identifier"
+    )
+    signed_url: str | None = Field(
+        default=None,
+        description="Signed URL for uploading/downloading the file to/from GCS",
+    )
+
+
+class DataStorageLocation(BaseModel):
+    """Model representing storage locations for a data storage entry."""
+
+    id: UUID = Field(description="Unique identifier for the storage locations")
+    data_storage_id: UUID = Field(description="ID of the associated data storage entry")
+    storage_config: DataStorageLocationConfig = Field(
+        description="Storage location configuration"
+    )
+    created_at: datetime = Field(description="Timestamp when the location was created")
+
+
+class DataStorageResponse(BaseModel):
+    """Response model for data storage operations."""
+
+    data_storage: DataStorageEntry = Field(description="The created data storage entry")
+    storage_locations: list[DataStorageLocation] = Field(
+        description="Storage location for this data entry"
+    )
+
+
+class DataStorageRequestPayload(BaseModel):
+    """Payload for creating a data storage entry."""
+
+    name: str = Field(description="Name of the data storage entry")
+    description: str | None = Field(
+        default=None, description="Description of the data storage entry"
+    )
+    content: str | None = Field(
+        default=None, description="Content of the data storage entry"
+    )
+    is_collection: bool = Field(
+        default=False, description="Whether this entry is a collection"
+    )
+    parent_id: UUID | None = Field(
+        default=None, description="ID of the parent entry for hierarchical storage"
+    )
+    project_id: UUID | None = Field(
+        default=None,
+        description="ID of the project this data storage entry belongs to",
+    )
+    dataset_id: UUID | None = Field(
+        default=None,
+        description="ID of existing dataset to add entry to, or None to create new dataset",
+    )
+    path: PathLike | str | None = Field(
+        default=None,
+        description="Path to store in the GCS bucket.",
+    )
+    existing_location: DataStorageLocationPayload | None = Field(
+        default=None, description="Target storage metadata"
+    )
+
+
+class CreateDatasetPayload(BaseModel):
+    """Payload for creating a dataset."""
+
+    id: UUID | None = Field(
+        default=None,
+        description="ID of the dataset to create, or None to create a new dataset",
+    )
+    name: str = Field(description="Name of the dataset")
+    description: str | None = Field(
+        default=None, description="Description of the dataset"
+    )
+
+
+class ManifestEntry(BaseModel):
+    """Model representing a single entry in a manifest file."""
+
+    description: str | None = Field(
+        default=None, description="Description of the file or directory"
+    )
+    metadata: dict[str, Any] | None = Field(
+        default=None, description="Additional metadata for the entry"
+    )
+
+
+class DirectoryManifest(BaseModel):
+    """Model representing the structure of a manifest file."""
+
+    entries: dict[str, "ManifestEntry | DirectoryManifest"] = Field(
+        default_factory=dict,
+        description="Map of file/directory names to their manifest entries",
+    )
+
+    def get_entry_description(self, name: str) -> str | None:
+        """Get description for a specific entry."""
+        entry = self.entries.get(name)
+        if isinstance(entry, ManifestEntry):
+            return entry.description
+        if isinstance(entry, DirectoryManifest):
+            # For nested directories, could derive description from contents
+            return None
+        return None
+
+    def get_entry_metadata(self, name: str) -> dict[str, Any] | None:
+        """Get metadata for a specific entry."""
+        entry = self.entries.get(name)
+        if isinstance(entry, ManifestEntry):
+            return entry.metadata
+        return None
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> "DirectoryManifest":
+        """Create DirectoryManifest from a dictionary (loaded from JSON/YAML)."""
+        entries: dict[str, ManifestEntry | DirectoryManifest] = {}
+        for name, value in data.items():
+            if isinstance(value, dict):
+                if "description" in value or "metadata" in value:
+                    # This looks like a ManifestEntry
+                    entries[name] = ManifestEntry(**value)
+                else:
+                    # This looks like a nested directory
+                    entries[name] = cls.from_dict(value)
+            else:
+                # Simple string description
+                entries[name] = ManifestEntry(description=str(value))
+
+        return cls(entries=entries)
+
+    def to_dict(self) -> dict[str, Any]:
+        """Convert back to dictionary format."""
+        result = {}
+        for name, entry in self.entries.items():
+            if isinstance(entry, ManifestEntry):
+                if entry.description is not None or entry.metadata is not None:
+                    entry_dict = {}
+                    if entry.description is not None:
+                        entry_dict["description"] = entry.description
+                    if entry.metadata is not None:
+                        entry_dict.update(entry.metadata)
+                    result[name] = entry_dict
+            elif isinstance(entry, DirectoryManifest):
+                result[name] = entry.to_dict()
+        return result
+
+
+class FileMetadata(BaseModel):
+    """Model representing metadata for a file being processed."""
+
+    path: Path = Field(description="Path to the file")
+    name: str = Field(description="Name of the file")
+    size: int | None = Field(default=None, description="Size of the file in bytes")
+    description: str | None = Field(
+        default=None, description="Description from manifest or generated"
+    )
+    is_directory: bool = Field(default=False, description="Whether this is a directory")
+    parent_id: UUID | None = Field(
+        default=None, description="Parent directory ID in the storage system"
+    )
+    dataset_id: UUID | None = Field(
+        default=None, description="Dataset ID this file belongs to"
+    )
+
+    @classmethod
+    def from_path(
+        cls,
+        path: Path,
+        description: str | None = None,
+        parent_id: UUID | None = None,
+        dataset_id: UUID | None = None,
+    ) -> "FileMetadata":
+        """Create FileMetadata from a Path object."""
+        size = None
+        is_directory = path.is_dir()
+
+        if not is_directory:
+            with contextlib.suppress(OSError):
+                size = path.stat().st_size
+
+        return cls(
+            path=path,
+            name=path.name,
+            size=size,
+            description=description,
+            is_directory=is_directory,
+            parent_id=parent_id,
+            dataset_id=dataset_id,
+        )
+
+
+class UploadProgress(BaseModel):
+    """Model for tracking upload progress."""
+
+    total_files: int = Field(description="Total number of files to upload")
+    uploaded_files: int = Field(default=0, description="Number of files uploaded")
+    total_bytes: int | None = Field(default=None, description="Total bytes to upload")
+    uploaded_bytes: int = Field(default=0, description="Number of bytes uploaded")
+    current_file: str | None = Field(
+        default=None, description="Currently uploading file"
+    )
+    errors: list[str] = Field(
+        default_factory=list, description="List of error messages"
+    )
+
+    @property
+    def progress_percentage(self) -> float:
+        """Calculate progress percentage based on files."""
+        if self.total_files == 0:
+            return 0.0
+        return (self.uploaded_files / self.total_files) * 100.0
+
+    @property
+    def bytes_percentage(self) -> float | None:
+        """Calculate progress percentage based on bytes."""
+        if not self.total_bytes or self.total_bytes == 0:
+            return None
+        return (self.uploaded_bytes / self.total_bytes) * 100.0
+
+    def add_error(self, error: str) -> None:
+        """Add an error message."""
+        self.errors.append(error)
+
+    def increment_files(self, bytes_uploaded: int = 0) -> None:
+        """Increment the uploaded files counter."""
+        self.uploaded_files += 1
+        self.uploaded_bytes += bytes_uploaded
+
+
+class DirectoryUploadConfig(BaseModel):
+    """Configuration for directory uploads."""
+
+    name: str = Field(description="Name for the directory upload")
+    description: str | None = Field(
+        default=None, description="Description for the directory"
+    )
+    as_collection: bool = Field(
+        default=False, description="Upload as single collection or hierarchically"
+    )
+    manifest_filename: str | None = Field(
+        default=None, description="Name of manifest file to use"
+    )
+    ignore_patterns: list[str] = Field(
+        default_factory=list, description="Patterns to ignore"
+    )
+    ignore_filename: str = Field(
+        default=".gitignore", description="Name of ignore file to read"
+    )
+    base_path: str | None = Field(default=None, description="Base path for storage")
+    parent_id: UUID | None = Field(default=None, description="Parent directory ID")
+    dataset_id: UUID | None = Field(default=None, description="Dataset ID to use")
+
+    def with_parent(
+        self, parent_id: UUID, dataset_id: UUID | None = None
+    ) -> "DirectoryUploadConfig":
+        """Create a new config with parent and dataset IDs set."""
+        return self.model_copy(
+            update={"parent_id": parent_id, "dataset_id": dataset_id or self.dataset_id}
+        )
+
+
+# Forward reference resolution for DirectoryManifest
+DirectoryManifest.model_rebuild()
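
The manifest models above round-trip between plain dictionaries (as loaded from JSON/YAML) and typed entries. A minimal sketch of that behavior; the import path is inferred from the RECORD at the end of this diff and is an assumption, not something the diff itself states:

    from futurehouse_client.models.data_storage_methods import DirectoryManifest

    manifest = DirectoryManifest.from_dict({
        "README.md": "Project overview",  # bare strings become ManifestEntry(description=...)
        "data": {"description": "Raw inputs", "metadata": {"rows": 100}},
        "src": {"main.py": "Entry point"},  # dicts without description/metadata nest recursively
    })
    assert manifest.get_entry_description("README.md") == "Project overview"
    assert manifest.get_entry_metadata("data") == {"rows": 100}
    # to_dict() merges metadata keys in alongside "description"
    assert manifest.to_dict()["data"] == {"description": "Raw inputs", "rows": 100}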
futurehouse_client/models/rest.py
@@ -23,6 +23,15 @@ class StoreEnvironmentFrameRequest(BaseModel):
     trajectory_timestep: int
 
 
+class TrajectoryPatchRequest(BaseModel):
+    public: bool | None = None
+    shared_with: list[int] | None = None
+    notification_enabled: bool | None = None
+    notification_type: str | None = None
+    min_estimated_time: float | None = None
+    max_estimated_time: float | None = None
+
+
 class ExecutionStatus(StrEnum):
     QUEUED = auto()
     IN_PROGRESS = "in progress"
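
TrajectoryPatchRequest is all-optional, which fits a partial-update (PATCH) body. A minimal sketch, assuming the module path futurehouse_client.models.rest and pydantic v2 serialization semantics:

    from futurehouse_client.models.rest import TrajectoryPatchRequest

    patch = TrajectoryPatchRequest(public=True, notification_enabled=False)
    # Only explicitly set fields survive exclude_unset, as a PATCH body expects.
    print(patch.model_dump(exclude_unset=True))
    # {'public': True, 'notification_enabled': False}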
@@ -54,7 +63,37 @@ class WorldModel(BaseModel):
     project_id: UUID | str | None = None
 
 
-class WorldModelResponse(BaseModel):
+class SearchOperator(StrEnum):
+    """Operators for structured search criteria."""
+
+    EQUALS = "equals"
+    CONTAINS = "contains"
+    STARTS_WITH = "starts_with"
+    ENDS_WITH = "ends_with"
+    GREATER_THAN = "greater_than"
+    LESS_THAN = "less_than"
+    BETWEEN = "between"
+    IN = "in"
+
+
+class SearchCriterion(BaseModel):
+    """A single search criterion with field, operator, and value."""
+
+    field: str
+    operator: SearchOperator
+    value: str | list[str] | bool
+
+
+class WorldModelSearchPayload(BaseModel):
+    """Payload for structured world model search."""
+
+    criteria: list[SearchCriterion]
+    size: int = 10
+    project_id: UUID | str | None = None
+    search_all_versions: bool = False
+
+
+class WorldModelResponse(WorldModel):
     """
     Response model for a world model snapshot.
 
@@ -62,13 +101,8 @@ class WorldModelResponse(BaseModel):
     """
 
     id: UUID | str
-    prior: UUID | str | None
-    name: str
-    description: str | None
-    content: str
-    trajectory_id: UUID | str | None
+    name: str  # type: ignore[mutable-override] # The API always returns a non-optional name, overriding the base model's optional field.
     email: str | None
-    model_metadata: JsonValue | None
     enabled: bool
     created_at: datetime
 
@@ -132,3 +166,10 @@ class DiscoveryResponse(BaseModel):
     associated_trajectories: list[UUID | str]
     validation_level: int
     created_at: datetime
+
+
+class DataStorageSearchPayload(BaseModel):
+    """Payload for structured data storage search."""
+
+    criteria: list[SearchCriterion]
+    size: int = 10
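
SearchCriterion, SearchOperator, and the two search payloads replace free-text queries with structured criteria. A construction sketch using only the fields defined above (module path assumed; how the server combines multiple criteria is not specified in this diff):

    from futurehouse_client.models.rest import (
        SearchCriterion,
        SearchOperator,
        WorldModelSearchPayload,
    )

    payload = WorldModelSearchPayload(
        criteria=[
            SearchCriterion(field="name", operator=SearchOperator.CONTAINS, value="kinase"),
            SearchCriterion(field="enabled", operator=SearchOperator.EQUALS, value=True),
        ],
        size=5,
    )
    print(payload.model_dump_json())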
futurehouse_client/utils/general.py
@@ -1,11 +1,74 @@
 import asyncio
-from collections.abc import Awaitable, Iterable
+from collections.abc import Awaitable, Callable, Iterable
 from typing import TypeVar
 
+from httpx import (
+    CloseError,
+    ConnectError,
+    ConnectTimeout,
+    HTTPStatusError,
+    NetworkError,
+    ReadError,
+    ReadTimeout,
+    RemoteProtocolError,
+    codes,
+)
+from requests.exceptions import RequestException, Timeout
+from tenacity import RetryCallState
 from tqdm.asyncio import tqdm
 
 T = TypeVar("T")
 
+RETRYABLE_HTTP_STATUS_CODES = {
+    codes.TOO_MANY_REQUESTS,
+    codes.INTERNAL_SERVER_ERROR,
+    codes.BAD_GATEWAY,
+    codes.SERVICE_UNAVAILABLE,
+    codes.GATEWAY_TIMEOUT,
+}
+
+_BASE_CONNECTION_ERRORS = (
+    # From requests
+    Timeout,
+    ConnectionError,
+    RequestException,
+    # From httpx
+    ConnectError,
+    ConnectTimeout,
+    ReadTimeout,
+    ReadError,
+    NetworkError,
+    RemoteProtocolError,
+    CloseError,
+)
+
+
+def create_retry_if_connection_error(
+    *additional_exceptions,
+) -> Callable[[RetryCallState], bool]:
+    """Create a retry condition with base connection errors, HTTP status errors, plus additional exceptions."""
+
+    def status_retries_with_exceptions(retry_state: RetryCallState) -> bool:
+        if retry_state.outcome is not None and hasattr(
+            retry_state.outcome, "exception"
+        ):
+            exception = retry_state.outcome.exception()
+            # connection errors
+            if isinstance(exception, _BASE_CONNECTION_ERRORS):
+                return True
+            # custom exceptions provided
+            if additional_exceptions and isinstance(exception, additional_exceptions):
+                return True
+            # any http exceptions
+            if isinstance(exception, HTTPStatusError):
+                return exception.response.status_code in RETRYABLE_HTTP_STATUS_CODES
+        return False
+
+    return status_retries_with_exceptions
+
+
+retry_if_connection_error = create_retry_if_connection_error()
+
 
 async def gather_with_concurrency(
     n: int | asyncio.Semaphore, coros: Iterable[Awaitable[T]], progress: bool = False
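
create_retry_if_connection_error returns a plain RetryCallState -> bool predicate, so the module-level retry_if_connection_error can be passed straight to tenacity's retry= argument. A usage sketch; the fetch function, URL handling, and the extra exception type named in the comment are hypothetical, not part of the package:

    import httpx
    from tenacity import retry, stop_after_attempt, wait_exponential

    @retry(
        retry=retry_if_connection_error,  # or create_retry_if_connection_error(SomeExtraError)
        stop=stop_after_attempt(5),
        wait=wait_exponential(multiplier=1, max=30),
    )
    def fetch(url: str) -> dict:
        response = httpx.get(url)
        # HTTPStatusError is retried only for codes in RETRYABLE_HTTP_STATUS_CODES
        response.raise_for_status()
        return response.json()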
futurehouse_client/utils/world_model_tools.py
@@ -5,7 +5,7 @@ from aviary.core import Tool
 
 from futurehouse_client.clients.rest_client import RestClient
 from futurehouse_client.models.app import Stage
-from futurehouse_client.models.rest import WorldModel
+from futurehouse_client.models.rest import SearchCriterion, SearchOperator, WorldModel
 
 
 class WorldModelTools:
@@ -56,7 +56,26 @@ class WorldModelTools:
         Returns:
             list[str]: A list of world model IDs that match the search query.
         """
-        return WorldModelTools._get_client().search_world_models(query, size=size)
+        criteria = (
+            [
+                SearchCriterion(
+                    field="name", operator=SearchOperator.CONTAINS, value=query
+                ),
+                SearchCriterion(
+                    field="description", operator=SearchOperator.CONTAINS, value=query
+                ),
+                SearchCriterion(
+                    field="content", operator=SearchOperator.CONTAINS, value=query
+                ),
+            ]
+            if query
+            else []
+        )
+
+        results = WorldModelTools._get_client().search_world_models(
+            criteria=criteria, size=size
+        )
+        return [str(model.id) for model in results]
 
 
 create_world_model_tool = Tool.from_function(WorldModelTools.create_world_model)
futurehouse_client/version.py
@@ -1,7 +1,14 @@
 # file generated by setuptools-scm
 # don't change, don't track in version control
 
-__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+__all__ = [
+    "__version__",
+    "__version_tuple__",
+    "version",
+    "version_tuple",
+    "__commit_id__",
+    "commit_id",
+]
 
 TYPE_CHECKING = False
 if TYPE_CHECKING:
@@ -9,13 +16,19 @@ if TYPE_CHECKING:
     from typing import Union
 
     VERSION_TUPLE = Tuple[Union[int, str], ...]
+    COMMIT_ID = Union[str, None]
 else:
     VERSION_TUPLE = object
+    COMMIT_ID = object
 
 version: str
 __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
+commit_id: COMMIT_ID
+__commit_id__: COMMIT_ID
 
-__version__ = version = '0.4.1'
-__version_tuple__ = version_tuple = (0, 4, 1)
+__version__ = version = '0.4.2'
+__version_tuple__ = version_tuple = (0, 4, 2)
+
+__commit_id__ = commit_id = None
futurehouse_client-0.4.2.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: futurehouse-client
-Version: 0.4.1
+Version: 0.4.2
 Summary: A client for interacting with endpoints of the FutureHouse service.
 Author-email: FutureHouse technical staff <hello@futurehouse.org>
 License: Apache License
@@ -213,18 +213,23 @@ Classifier: Programming Language :: Python
 Requires-Python: <3.14,>=3.11
 Description-Content-Type: text/markdown
 License-File: LICENSE
+Requires-Dist: aiofiles
 Requires-Dist: cloudpickle
 Requires-Dist: fhaviary
+Requires-Dist: google-resumable-media[aiohttp]
 Requires-Dist: httpx
 Requires-Dist: ldp>=0.22.0
 Requires-Dist: litellm
+Requires-Dist: openai<1.100.0,>=1
 Requires-Dist: pydantic
 Requires-Dist: python-dotenv
+Requires-Dist: requests
 Requires-Dist: tenacity
 Requires-Dist: tqdm>=4.62
 Provides-Extra: dev
 Requires-Dist: black; extra == "dev"
 Requires-Dist: futurehouse-client[monitoring,typing]; extra == "dev"
+Requires-Dist: ipykernel; extra == "dev"
 Requires-Dist: jupyter; extra == "dev"
 Requires-Dist: jupyterlab; extra == "dev"
 Requires-Dist: mypy; extra == "dev"
@@ -244,6 +249,7 @@ Requires-Dist: setuptools_scm; extra == "dev"
 Provides-Extra: monitoring
 Requires-Dist: newrelic>=8.8.0; extra == "monitoring"
 Provides-Extra: typing
+Requires-Dist: types-PyYAML; extra == "typing"
 Requires-Dist: types-requests; extra == "typing"
 Requires-Dist: types-tqdm; extra == "typing"
 Dynamic: license-file
futurehouse_client-0.4.2.dist-info/RECORD (new file)
@@ -0,0 +1,23 @@
+futurehouse_client/__init__.py,sha256=PvFTkocA-hobsWoDEBEdrUgLIbuVbDs_0nvMdImJmHk,707
+futurehouse_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+futurehouse_client/version.py,sha256=A45grTqzrHuDn1CT9K5GVUbY4_Q3OSTcXAl3zdHzcEI,704
+futurehouse_client/clients/__init__.py,sha256=-HXNj-XJ3LRO5XM6MZ709iPs29YpApss0Q2YYg1qMZw,280
+futurehouse_client/clients/data_storage_methods.py,sha256=HcAgNdqzvHs2zgxJKwgo99-L3-nAqnv2XntWf3gH4zY,92447
+futurehouse_client/clients/job_client.py,sha256=b5gpzulZpxpv9R337r3UKItnMdtd6CGlI1sV3_VQJso,13985
+futurehouse_client/clients/rest_client.py,sha256=Ng36P8obNW1WTRYWTqLQ0ka5tefT-r723_Kr0haT9aM,103225
+futurehouse_client/models/__init__.py,sha256=0YlzKGymbY1g4cXxnUc0BUnthTkVBf12bCZlGUcMQqk,701
+futurehouse_client/models/app.py,sha256=UUg17I3zk6cH_7mrdojHGYvQfm_SeDkuUxsPlRyIYz0,31895
+futurehouse_client/models/client.py,sha256=n4HD0KStKLm6Ek9nL9ylP-bkK10yzAaD1uIDF83Qp_A,1828
+futurehouse_client/models/data_storage_methods.py,sha256=fmJ6XLPFNY6S0XdLyzXsrGP3y1tsHQKXrMiO-6zRVJs,12775
+futurehouse_client/models/rest.py,sha256=Fqw0_ypULzd7IV93PKooSG9W5_g7fGFsdW9jNVVImHA,4514
+futurehouse_client/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+futurehouse_client/utils/auth.py,sha256=tgWELjKfg8eWme_qdcRmc8TjQN9DVZuHHaVXZNHLchk,2960
+futurehouse_client/utils/general.py,sha256=PIkGLCSA3kUvc6mwR-prEB7YnMdKILOIm6cPowSZzzs,2532
+futurehouse_client/utils/module_utils.py,sha256=aFyd-X-pDARXz9GWpn8SSViUVYdSbuy9vSkrzcVIaGI,4955
+futurehouse_client/utils/monitoring.py,sha256=UjRlufe67kI3VxRHOd5fLtJmlCbVA2Wqwpd4uZhXkQM,8728
+futurehouse_client/utils/world_model_tools.py,sha256=v2krZGrco0ur2a_pcRMtnQL05SxlIoBXuJ5R1JkQNws,2921
+futurehouse_client-0.4.2.dist-info/licenses/LICENSE,sha256=oQ9ZHjUi-_6GfP3gs14FlPb0OlGwE1QCCKFGnJ4LD2I,11341
+futurehouse_client-0.4.2.dist-info/METADATA,sha256=jDhGg44QEmjf8Br0ETgEdWa_0kyBFqiGjq4IMSNmtAU,27014
+futurehouse_client-0.4.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+futurehouse_client-0.4.2.dist-info/top_level.txt,sha256=TRuLUCt_qBnggdFHCX4O_BoCu1j2X43lKfIZC-ElwWY,19
+futurehouse_client-0.4.2.dist-info/RECORD,,
futurehouse_client-0.4.1.dist-info/RECORD (removed)
@@ -1,21 +0,0 @@
-futurehouse_client/__init__.py,sha256=PvFTkocA-hobsWoDEBEdrUgLIbuVbDs_0nvMdImJmHk,707
-futurehouse_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-futurehouse_client/version.py,sha256=yF2DwGUoQKNnLhAbpZX8kCQKjw77EZzhRk7_OTftets,511
-futurehouse_client/clients/__init__.py,sha256=-HXNj-XJ3LRO5XM6MZ709iPs29YpApss0Q2YYg1qMZw,280
-futurehouse_client/clients/job_client.py,sha256=D51_qTxya6g5Wfg_ZfJdP031TV_YDJeXkGMiYAJ1qRc,11962
-futurehouse_client/clients/rest_client.py,sha256=NyK6-YDcvswWcRmvMjUWvvfuE4eYMmI0bWM4Qnkgr8Y,99684
-futurehouse_client/models/__init__.py,sha256=kQ4R7VEuRxO0IQEW_sk9CndBL7zzl8rUKI24ddyYLM0,647
-futurehouse_client/models/app.py,sha256=UUg17I3zk6cH_7mrdojHGYvQfm_SeDkuUxsPlRyIYz0,31895
-futurehouse_client/models/client.py,sha256=n4HD0KStKLm6Ek9nL9ylP-bkK10yzAaD1uIDF83Qp_A,1828
-futurehouse_client/models/rest.py,sha256=ybelLsyTsKYud7DYUCF0sFF6u81bl8WmS_wWAnbX-0M,3382
-futurehouse_client/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-futurehouse_client/utils/auth.py,sha256=tgWELjKfg8eWme_qdcRmc8TjQN9DVZuHHaVXZNHLchk,2960
-futurehouse_client/utils/general.py,sha256=A_rtTiYW30ELGEZlWCIArO7q1nEmqi8hUlmBRYkMQ_c,767
-futurehouse_client/utils/module_utils.py,sha256=aFyd-X-pDARXz9GWpn8SSViUVYdSbuy9vSkrzcVIaGI,4955
-futurehouse_client/utils/monitoring.py,sha256=UjRlufe67kI3VxRHOd5fLtJmlCbVA2Wqwpd4uZhXkQM,8728
-futurehouse_client/utils/world_model_tools.py,sha256=Ctiy-EfK7EXrjmKO_nI6V5VhOJyHKWc0sKwa8Q0HAAo,2292
-futurehouse_client-0.4.1.dist-info/licenses/LICENSE,sha256=oQ9ZHjUi-_6GfP3gs14FlPb0OlGwE1QCCKFGnJ4LD2I,11341
-futurehouse_client-0.4.1.dist-info/METADATA,sha256=ucpuTCGRrKqqgXT8KEhy9XNLT1_e3LQdtPK-IcEKB0g,26797
-futurehouse_client-0.4.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-futurehouse_client-0.4.1.dist-info/top_level.txt,sha256=TRuLUCt_qBnggdFHCX4O_BoCu1j2X43lKfIZC-ElwWY,19
-futurehouse_client-0.4.1.dist-info/RECORD,,