futurehouse-client 0.4.5.dev49__py3-none-any.whl → 0.4.5.dev160__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -8,6 +8,13 @@ from .models.app import (
     TaskResponse,
     TaskResponseVerbose,
 )
+from .models.job_event import (
+    CostComponent,
+    ExecutionType,
+    JobEventCreateRequest,
+    JobEventCreateResponse,
+    JobEventUpdateRequest,
+)
 from .utils.world_model_tools import (
     create_world_model_tool,
     make_world_model_tools,
@@ -15,9 +22,14 @@ from .utils.world_model_tools import (
 )
 
 __all__ = [
+    "CostComponent",
+    "ExecutionType",
     "FinchTaskResponse",
     "FutureHouseClient",
     "JobClient",
+    "JobEventCreateRequest",
+    "JobEventCreateResponse",
+    "JobEventUpdateRequest",
     "JobNames",
     "PQATaskResponse",
     "PhoenixTaskResponse",
@@ -35,10 +35,12 @@ from futurehouse_client.models.data_storage_methods import (
     DataStorageResponse,
     DataStorageType,
     DirectoryManifest,
+    GetDatasetAndEntriesResponse,
     ManifestEntry,
 )
 from futurehouse_client.models.rest import (
     DataStorageSearchPayload,
+    FilterLogic,
     SearchCriterion,
 )
 from futurehouse_client.utils.general import retry_if_connection_error
@@ -1530,7 +1532,33 @@ class DataStorageMethods:
             project_id: ID of the project this data storage entry belongs to
 
         Returns:
-            DataStorageResponse containing the created data storage entry and storage locations
+            DataStorageResponse: A Pydantic model containing:
+                - data_storage: DataStorageEntry with fields:
+                    - id - Unique identifier for the data storage entry
+                    - name - Name of the data storage entry
+                    - description - Description of the data storage entry
+                    - content - Content of the data storage entry
+                    - embedding - Embedding vector for the content
+                    - is_collection - Whether this entry is a collection
+                    - tags - List of tags associated with the entry
+                    - parent_id - ID of the parent entry for hierarchical storage
+                    - project_id - ID of the project this entry belongs to
+                    - dataset_id - ID of the dataset this entry belongs to
+                    - path - Path in the storage system where this entry is located
+                    - bigquery_schema - Target BigQuery schema for the entry
+                    - user_id - ID of the user who created this entry
+                    - created_at - Timestamp when the entry was created
+                    - modified_at - Timestamp when the entry was last updated
+                - storage_locations with each location containing:
+                    - id - Unique identifier for the storage location
+                    - data_storage_id - ID of the associated data storage entry
+                    - storage_config pydantic model with fields:
+                        - storage_type - Type of storage (e.g., 'gcs', 'pg_table')
+                        - content_type - Type of content stored
+                        - content_schema - Content schema
+                        - metadata - Location metadata
+                        - location - Location path or identifier
+                    - signed_url - Signed URL for uploading/downloading
 
         Raises:
             DataStorageCreationError: If there's an error creating the data storage entry
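As a quick orientation to the documented return shape, here is a minimal sketch (based only on the field list above, not part of the diff) that reads a few of those fields from a DataStorageResponse:

```python
from futurehouse_client.models.data_storage_methods import DataStorageResponse


def summarize_storage_response(response: DataStorageResponse) -> None:
    """Print a few of the documented fields from a storage-creation response."""
    entry = response.data_storage
    print(entry.id, entry.name, entry.dataset_id, entry.created_at)
    for location in response.storage_locations:
        # Each location carries its own storage_config describing where and
        # how the content is stored (e.g. 'gcs' vs 'pg_table').
        print(location.id, location.storage_config.storage_type)
```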
@@ -1577,7 +1605,33 @@ class DataStorageMethods:
             project_id: ID of the project this data storage entry belongs to
 
         Returns:
-            DataStorageResponse containing the created data storage entry and storage locations
+            DataStorageResponse: A Pydantic model containing:
+                - data_storage: DataStorageEntry with fields:
+                    - id - Unique identifier for the data storage entry
+                    - name - Name of the data storage entry
+                    - description - Description of the data storage entry
+                    - content - Content of the data storage entry
+                    - embedding - Embedding vector for the content
+                    - is_collection - Whether this entry is a collection
+                    - tags - List of tags associated with the entry
+                    - parent_id - ID of the parent entry for hierarchical storage
+                    - project_id - ID of the project this entry belongs to
+                    - dataset_id - ID of the dataset this entry belongs to
+                    - path - Path in the storage system where this entry is located
+                    - bigquery_schema - Target BigQuery schema for the entry
+                    - user_id - ID of the user who created this entry
+                    - created_at - Timestamp when the entry was created
+                    - modified_at - Timestamp when the entry was last updated
+                - storage_locations with each location containing:
+                    - id - Unique identifier for the storage location
+                    - data_storage_id - ID of the associated data storage entry
+                    - storage_config pydantic model with fields:
+                        - storage_type - Type of storage (e.g., 'gcs', 'pg_table')
+                        - content_type - Type of content stored
+                        - content_schema - Content schema
+                        - metadata - Location metadata
+                        - location - Location path or identifier
+                    - signed_url - Signed URL for uploading/downloading
 
         Raises:
             DataStorageCreationError: If there's an error creating the data storage entry
@@ -1740,6 +1794,7 @@ class DataStorageMethods:
         ignore_patterns: list[str] | None = None,
         ignore_filename: str = ".gitignore",
         project_id: UUID | None = None,
+        dataset_id: UUID | None = None,
     ) -> DataStorageResponse:
         """Store file or directory content in the data storage system.
 
@@ -1755,13 +1810,45 @@ class DataStorageMethods:
             description: Optional description of the data storage entry
             path: Optional path for the data storage entry
             as_collection: If true, upload directories as a single zip file collection.
-            manifest_filename: Name of manifest file
+            manifest_filename: Name of manifest file (JSON or YAML) containing:
+                - entries - Map of file/directory names to their manifest entries
+                - Each ManifestEntry contains:
+                    - description - Description of the file or directory
+                    - metadata - Additional metadata for the entry
+                - Each DirectoryManifest contains nested entries following the same structure
             ignore_patterns: List of patterns to ignore when zipping directories
             ignore_filename: Name of ignore file to read from directory (default: .gitignore)
             project_id: ID of the project this data storage entry belongs to
+            dataset_id: ID of the dataset this data storage entry belongs to
 
         Returns:
-            DataStorageResponse containing the final data storage entry
+            DataStorageResponse: A Pydantic model containing:
+                - data_storage: DataStorageEntry with fields:
+                    - id - Unique identifier for the data storage entry
+                    - name - Name of the data storage entry
+                    - description - Description of the data storage entry
+                    - content - Content of the data storage entry
+                    - embedding - Embedding vector for the content
+                    - is_collection - Whether this entry is a collection
+                    - tags - List of tags associated with the entry
+                    - parent_id - ID of the parent entry for hierarchical storage
+                    - project_id - ID of the project this entry belongs to
+                    - dataset_id - ID of the dataset this entry belongs to
+                    - path - Path in the storage system where this entry is located
+                    - bigquery_schema - Target BigQuery schema for the entry
+                    - user_id - ID of the user who created this entry
+                    - created_at - Timestamp when the entry was created
+                    - modified_at - Timestamp when the entry was last updated
+                - storage_locations with each location containing:
+                    - id - Unique identifier for the storage location
+                    - data_storage_id - ID of the associated data storage entry
+                    - storage_config pydantic model with fields:
+                        - storage_type - Type of storage (e.g., 'gcs', 'pg_table')
+                        - content_type - Type of content stored
+                        - content_schema - Content schema
+                        - metadata - Location metadata
+                        - location - Location path or identifier
+                    - signed_url - Signed URL for uploading/downloading
 
         Raises:
             DataStorageCreationError: If there's an error in the process
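For orientation, a hypothetical manifest file matching the structure described in the docstring above might look like this (the exact schema is defined by ManifestEntry and DirectoryManifest; the file below is only an illustrative sketch):

```python
import json

# Hypothetical manifest mirroring the documented structure: a map of
# file/directory names to entries, where directories nest their own "entries".
manifest = {
    "entries": {
        "README.md": {
            "description": "Project overview",
            "metadata": {"format": "markdown"},
        },
        "data": {
            "entries": {
                "train.csv": {
                    "description": "Training split",
                    "metadata": {"rows": 10000},
                },
            },
        },
    },
}

with open("manifest.json", "w", encoding="utf-8") as f:
    json.dump(manifest, f, indent=2)
```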
@@ -1788,6 +1875,7 @@ class DataStorageMethods:
             ignore_patterns=ignore_patterns,
             ignore_filename=ignore_filename,
             project_id=project_id,
+            dataset_id=dataset_id,
         )
         if not responses:
             raise DataStorageCreationError(
@@ -1833,15 +1921,47 @@ class DataStorageMethods:
             path: Optional GCS path for the entry.
             as_collection: If uploading a directory, `True` zips it into a single collection,
                 `False` uploads it as a hierarchical structure of individual objects.
-            manifest_filename: Optional manifest file for hierarchical uploads.
+            manifest_filename: Optional manifest file (JSON or YAML) for hierarchical uploads containing:
+                - entries - Map of file/directory names to their manifest entries
+                - Each ManifestEntry contains:
+                    - description - Description of the file or directory
+                    - metadata - Additional metadata for the entry
+                - Each DirectoryManifest contains nested entries following the same structure
             ignore_patterns: List of patterns to ignore when zipping.
             ignore_filename: Name of ignore file to read (default: .gitignore).
             dataset_id: Optional dataset ID to add entry to, or None to create new dataset.
             project_id: ID of the project this data storage entry belongs to
 
         Returns:
-            The `DataStorageResponse` for the created entry. For hierarchical uploads,
-            this is the response for the root directory entry.
+            DataStorageResponse: A Pydantic model containing:
+                - data_storage: DataStorageEntry with fields:
+                    - id - Unique identifier for the data storage entry
+                    - name - Name of the data storage entry
+                    - description - Description of the data storage entry
+                    - content - Content of the data storage entry
+                    - embedding - Embedding vector for the content
+                    - is_collection - Whether this entry is a collection
+                    - tags - List of tags associated with the entry
+                    - parent_id - ID of the parent entry for hierarchical storage
+                    - project_id - ID of the project this entry belongs to
+                    - dataset_id - ID of the dataset this entry belongs to
+                    - path - Path in the storage system where this entry is located
+                    - bigquery_schema - Target BigQuery schema for the entry
+                    - user_id - ID of the user who created this entry
+                    - created_at - Timestamp when the entry was created
+                    - modified_at - Timestamp when the entry was last updated
+                - storage_locations with each location containing:
+                    - id - Unique identifier for the storage location
+                    - data_storage_id - ID of the associated data storage entry
+                    - storage_config pydantic model with fields:
+                        - storage_type - Type of storage (e.g., 'gcs', 'pg_table')
+                        - content_type - Type of content stored
+                        - content_schema - Content schema
+                        - metadata - Location metadata
+                        - location - Location path or identifier
+                    - signed_url - Signed URL for uploading/downloading
+
+            For hierarchical uploads, this is the response for the root directory entry.
         """
         file_path = self._validate_file_path(file_path)
 
@@ -1902,7 +2022,12 @@ class DataStorageMethods:
 
         Args:
             name: Name of the data storage entry
-            existing_location: Describes the existing data source location to register
+            existing_location: a pydantic model describing the existing data source location to register, containing:
+                - storage_type - Type of storage (BIGQUERY, GCS, PG_TABLE, RAW_CONTENT, ELASTIC_SEARCH)
+                - content_type - Type of content (BQ_DATASET, BQ_TABLE, TEXT, TEXT_W_EMBEDDINGS, DIRECTORY, FILE, INDEX, INDEX_W_EMBEDDINGS)
+                - content_schema - Content schema for the data
+                - metadata - Additional metadata for the location
+                - location - Location path or identifier
             description: Optional description of the data storage entry
             as_collection: If uploading a directory, `True` creates a single storage entry for
                 the whole directory and multiple storage locations for each file, `False` assumes
@@ -1911,7 +2036,33 @@ class DataStorageMethods:
             project_id: ID of the project this data storage entry belongs to
 
         Returns:
-            DataStorageResponse containing the created data storage entry and storage locations
+            DataStorageResponse: A Pydantic model containing:
+                - data_storage: DataStorageEntry with fields:
+                    - id - Unique identifier for the data storage entry
+                    - name - Name of the data storage entry
+                    - description - Description of the data storage entry
+                    - content - Content of the data storage entry
+                    - embedding - Embedding vector for the content
+                    - is_collection - Whether this entry is a collection
+                    - tags - List of tags associated with the entry
+                    - parent_id - ID of the parent entry for hierarchical storage
+                    - project_id - ID of the project this entry belongs to
+                    - dataset_id - ID of the dataset this entry belongs to
+                    - path - Path in the storage system where this entry is located
+                    - bigquery_schema - Target BigQuery schema for the entry
+                    - user_id - ID of the user who created this entry
+                    - created_at - Timestamp when the entry was created
+                    - modified_at - Timestamp when the entry was last updated
+                - storage_locations with each location containing:
+                    - id - Unique identifier for the storage location
+                    - data_storage_id - ID of the associated data storage entry
+                    - storage_config pydantic model with fields:
+                        - storage_type - Type of storage (e.g., 'gcs', 'pg_table')
+                        - content_type - Type of content stored
+                        - content_schema - Content schema
+                        - metadata - Location metadata
+                        - location - Location path or identifier
+                    - signed_url - Signed URL for uploading/downloading
 
         Raises:
             DataStorageCreationError: If there's an error creating the data storage entry
@@ -1957,7 +2108,12 @@ class DataStorageMethods:
 
         Args:
             name: Name of the data storage entry
-            existing_location: Describes the existing data source location to register
+            existing_location: a pydantic model describing the existing data source location to register, containing:
+                - storage_type - Type of storage (BIGQUERY, GCS, PG_TABLE, RAW_CONTENT, ELASTIC_SEARCH)
+                - content_type - Type of content (BQ_DATASET, BQ_TABLE, TEXT, TEXT_W_EMBEDDINGS, DIRECTORY, FILE, INDEX, INDEX_W_EMBEDDINGS)
+                - content_schema - Content schema for the data
+                - metadata - Additional metadata for the location
+                - location - Location path or identifier
             description: Optional description of the data storage entry
             as_collection: If uploading a directory, `True` creates a single storage entry for
                 the whole directory and multiple storage locations for each file, `False` assumes
@@ -1966,7 +2122,33 @@ class DataStorageMethods:
             project_id: ID of the project this data storage entry belongs to
 
         Returns:
-            DataStorageResponse containing the created data storage entry and storage locations
+            DataStorageResponse: A Pydantic model containing:
+                - data_storage: DataStorageEntry with fields:
+                    - id - Unique identifier for the data storage entry
+                    - name - Name of the data storage entry
+                    - description - Description of the data storage entry
+                    - content - Content of the data storage entry
+                    - embedding - Embedding vector for the content
+                    - is_collection - Whether this entry is a collection
+                    - tags - List of tags associated with the entry
+                    - parent_id - ID of the parent entry for hierarchical storage
+                    - project_id - ID of the project this entry belongs to
+                    - dataset_id - ID of the dataset this entry belongs to
+                    - path - Path in the storage system where this entry is located
+                    - bigquery_schema - Target BigQuery schema for the entry
+                    - user_id - ID of the user who created this entry
+                    - created_at - Timestamp when the entry was created
+                    - modified_at - Timestamp when the entry was last updated
+                - storage_locations with each location containing:
+                    - id - Unique identifier for the storage location
+                    - data_storage_id - ID of the associated data storage entry
+                    - storage_config pydantic model with fields:
+                        - storage_type - Type of storage (e.g., 'gcs', 'pg_table')
+                        - content_type - Type of content stored
+                        - content_schema - Content schema
+                        - metadata - Location metadata
+                        - location - Location path or identifier
+                    - signed_url - Signed URL for uploading/downloading
 
         Raises:
             DataStorageCreationError: If there's an error creating the data storage entry
@@ -2003,12 +2185,17 @@ class DataStorageMethods:
         self,
         criteria: list[SearchCriterion] | None = None,
         size: int = 10,
+        filter_logic: FilterLogic = FilterLogic.OR,
     ) -> list[dict]:
         """Search data storage objects using structured criteria.
 
         Args:
-            criteria: List of search criteria (SearchCriterion objects with field, operator, value)
+            criteria: List of SearchCriterion pydantic models with fields:
+                - field - Field name to search on
+                - operator - Search operator (EQUALS, CONTAINS, STARTS_WITH, ENDS_WITH, GREATER_THAN, LESS_THAN, BETWEEN, IN)
+                - value - Value to search for
             size: Number of results to return (1-100)
+            filter_logic: Either "AND" (all criteria must match) or "OR" (at least one must match)
 
         Returns:
             List of search results with scores and data storage information
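A sketch of how the new filter_logic parameter could be used. The operator literals and the search method name are assumptions for illustration only; SearchCriterion and FilterLogic are imported from futurehouse_client.models.rest as shown earlier in this diff:

```python
from futurehouse_client import FutureHouseClient
from futurehouse_client.models.rest import FilterLogic, SearchCriterion

client = FutureHouseClient(api_key="...")  # assumed authentication setup

criteria = [
    # Operator values are illustrative; the supported operators are listed
    # in the docstring above (EQUALS, CONTAINS, STARTS_WITH, ...).
    SearchCriterion(field="name", operator="contains", value="protein"),
    SearchCriterion(field="tags", operator="equals", value="curated"),
]

# FilterLogic.AND requires every criterion to match; the default
# FilterLogic.OR returns entries matching at least one criterion.
results = client.search_data_storage(  # hypothetical method name
    criteria=criteria,
    size=5,
    filter_logic=FilterLogic.AND,
)
for hit in results:
    print(hit)
```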
@@ -2029,6 +2216,7 @@ class DataStorageMethods:
         payload = DataStorageSearchPayload(
             criteria=criteria or [],
             size=max(1, min(100, size)),  # Clamp between 1-100
+            filter_logic=filter_logic,
         )
 
         response = self.client.post(
@@ -2059,12 +2247,17 @@ class DataStorageMethods:
         self,
         criteria: list[SearchCriterion] | None = None,
         size: int = 10,
+        filter_logic: FilterLogic = FilterLogic.OR,
     ) -> list[dict]:
         """Asynchronously search data storage objects using structured criteria.
 
         Args:
-            criteria: List of search criteria (SearchCriterion objects with field, operator, value)
+            criteria: List of SearchCriterion pydantic models with fields:
+                - field - Field name to search on
+                - operator - Search operator (EQUALS, CONTAINS, STARTS_WITH, ENDS_WITH, GREATER_THAN, LESS_THAN, BETWEEN, IN)
+                - value - Value to search for
             size: Number of results to return (1-100)
+            filter_logic: Either "AND" (all criteria must match) or "OR" (at least one must match)
 
         Returns:
             List of search results with scores and data storage information
@@ -2085,6 +2278,7 @@ class DataStorageMethods:
         payload = DataStorageSearchPayload(
             criteria=criteria or [],
             size=max(1, min(100, size)),  # Clamp between 1-100
+            filter_logic=filter_logic,
         )
 
         response = await self.async_client.post(
@@ -2124,11 +2318,11 @@ class DataStorageMethods:
         """Search data storage objects using vector similarity.
 
         Args:
-            embedding: Embedding vector for similarity search
+            embedding: List of float values representing the embedding vector for similarity search
             size: Number of results to return (1-100)
             min_score: Minimum similarity score (0.0-1.0)
             dataset_id: Optional dataset ID filter
-            tags: Optional list of tags to filter by
+            tags: Optional list of string tags to filter by
             user_id: Optional user ID filter (admin only)
             project_id: Optional project ID filter
 
@@ -2202,11 +2396,11 @@ class DataStorageMethods:
         """Asynchronously search data storage objects using vector similarity.
 
         Args:
-            embedding: Embedding vector for similarity search
+            embedding: List of float values representing the embedding vector for similarity search
             size: Number of results to return (1-100)
             min_score: Minimum similarity score (0.0-1.0)
             dataset_id: Optional dataset ID filter
-            tags: Optional list of tags to filter by
+            tags: Optional list of string tags to filter by
             user_id: Optional user ID filter (admin only)
             project_id: Optional project ID filter
 
@@ -2274,12 +2468,12 @@ class DataStorageMethods:
         """Fetch data from the storage system (sync version).
 
         Args:
-            data_storage_id: ID of the data storage entry to fetch
+            data_storage_id: UUID of the data storage entry to fetch
 
         Returns:
             For PG_TABLE storage: string content
             For GCS storage: Path to downloaded file (may be unzipped if it was a zip)
-            For multi-location entries: dict of location IDs to dicts with signed URL and file name
+            For multi-location entries: list of downloaded files
             None if not found or error occurred
         """
         if not data_storage_id:
@@ -2348,12 +2542,12 @@ class DataStorageMethods:
         """Fetch data from the storage system.
 
         Args:
-            data_storage_id: ID of the data storage entry to fetch
+            data_storage_id: UUID of the data storage entry to fetch
 
         Returns:
             For PG_TABLE storage: string content
             For GCS storage: Path to downloaded file (may be unzipped if it was a zip)
-            For multi-location entries: dict of location IDs to dicts with signed URL and file name
+            For multi-location entries: list of downloaded files
             None if not found or error occurred
         """
         if not data_storage_id:
@@ -2423,7 +2617,23 @@ class DataStorageMethods:
         name: str,
         description: str | None = None,
         dataset_id: UUID | None = None,
-    ):
+    ) -> CreateDatasetPayload:
+        """Asynchronously create a new dataset.
+
+        Args:
+            name: Name of the dataset to create
+            description: Optional description of the dataset
+            dataset_id: Optional UUID to assign to the dataset, or None to auto-generate
+
+        Returns:
+            CreateDatasetPayload: A Pydantic model containing:
+                - id - ID of the created dataset (None if auto-generated)
+                - name - Name of the dataset
+                - description - Description of the dataset
+
+        Raises:
+            DataStorageCreationError: If there's an error creating the dataset
+        """
         try:
             payload = CreateDatasetPayload(
                 name=name,
@@ -2454,7 +2664,23 @@ class DataStorageMethods:
         name: str,
         description: str | None = None,
         dataset_id: UUID | None = None,
-    ):
+    ) -> CreateDatasetPayload:
+        """Create a new dataset.
+
+        Args:
+            name: Name of the dataset to create
+            description: Optional description of the dataset
+            dataset_id: Optional UUID to assign to the dataset, or None to auto-generate
+
+        Returns:
+            CreateDatasetPayload: A Pydantic model containing:
+                - id - ID of the created dataset (None if auto-generated)
+                - name - Name of the dataset
+                - description - Description of the dataset
+
+        Raises:
+            DataStorageCreationError: If there's an error creating the dataset
+        """
         try:
             payload = CreateDatasetPayload(
                 name=name,
@@ -2528,14 +2754,48 @@ class DataStorageMethods:
         retry=retry_if_connection_error,
         before_sleep=before_sleep_log(logger, logging.WARNING),
     )
-    async def aget_dataset(self, dataset_id: UUID):
+    async def aget_dataset(self, dataset_id: UUID) -> GetDatasetAndEntriesResponse:
+        """Asynchronously retrieve a dataset by ID.
+
+        Args:
+            dataset_id: UUID of the dataset to retrieve
+
+        Returns:
+            GetDatasetAndEntriesResponse: A Pydantic model containing:
+                - dataset: DatasetStorage with fields:
+                    - id - Unique identifier for the dataset
+                    - name - Name of the dataset
+                    - user_id - ID of the user who created the dataset
+                    - description - Description of the dataset
+                    - created_at - Timestamp when the dataset was created
+                    - modified_at - Timestamp when the dataset was last modified
+                - data_storage_entries - List of data storage entries in the dataset, each containing:
+                    - id - Unique identifier for the data storage entry
+                    - name - Name of the data storage entry
+                    - description - Description of the data storage entry
+                    - content - Content of the data storage entry
+                    - embedding - Embedding vector for the content
+                    - is_collection - Whether this entry is a collection
+                    - tags - List of tags associated with the entry
+                    - parent_id - ID of the parent entry for hierarchical storage
+                    - project_id - ID of the project this entry belongs to
+                    - dataset_id - ID of the dataset this entry belongs to
+                    - path - Path in the storage system where this entry is located
+                    - bigquery_schema - Target BigQuery schema for the entry
+                    - user_id - ID of the user who created this entry
+                    - created_at - Timestamp when the entry was created
+                    - modified_at - Timestamp when the entry was last updated
+
+        Raises:
+            DataStorageError: If there's an error retrieving the dataset
+        """
         try:
             response = await self.async_client.get(
                 f"/v0.1/data-storage/datasets/{dataset_id}"
             )
             response.raise_for_status()
 
-            return response.json()
+            return GetDatasetAndEntriesResponse.model_validate(response.json())
         except HTTPStatusError as e:
             self._handle_http_errors(e, "retrieving")
         except Exception as e:
@@ -2547,12 +2807,46 @@ class DataStorageMethods:
         retry=retry_if_connection_error,
         before_sleep=before_sleep_log(logger, logging.WARNING),
     )
-    def get_dataset(self, dataset_id: UUID):
+    def get_dataset(self, dataset_id: UUID) -> GetDatasetAndEntriesResponse:
+        """Retrieve a dataset by ID.
+
+        Args:
+            dataset_id: UUID of the dataset to retrieve
+
+        Returns:
+            GetDatasetAndEntriesResponse: A Pydantic model containing:
+                - dataset: DatasetStorage with fields:
+                    - id - Unique identifier for the dataset
+                    - name - Name of the dataset
+                    - user_id - ID of the user who created the dataset
+                    - description - Description of the dataset
+                    - created_at - Timestamp when the dataset was created
+                    - modified_at - Timestamp when the dataset was last modified
+                - data_storage_entries - List of data storage entries in the dataset, each containing:
+                    - id - Unique identifier for the data storage entry
+                    - name - Name of the data storage entry
+                    - description - Description of the data storage entry
+                    - content - Content of the data storage entry
+                    - embedding - Embedding vector for the content
+                    - is_collection - Whether this entry is a collection
+                    - tags - List of tags associated with the entry
+                    - parent_id - ID of the parent entry for hierarchical storage
+                    - project_id - ID of the project this entry belongs to
+                    - dataset_id - ID of the dataset this entry belongs to
+                    - path - Path in the storage system where this entry is located
+                    - bigquery_schema - Target BigQuery schema for the entry
+                    - user_id - ID of the user who created this entry
+                    - created_at - Timestamp when the entry was created
+                    - modified_at - Timestamp when the entry was last updated
+
+        Raises:
+            DataStorageError: If there's an error retrieving the dataset
+        """
         try:
             response = self.client.get(f"/v0.1/data-storage/datasets/{dataset_id}")
             response.raise_for_status()
 
-            return response.json()
+            return GetDatasetAndEntriesResponse.model_validate(response.json())
         except HTTPStatusError as e:
             self._handle_http_errors(e, "retrieving")
         except Exception as e:
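A short usage sketch for the typed dataset retrieval above, assuming a configured FutureHouseClient that exposes these DataStorageMethods (the API key and dataset ID below are placeholders):

```python
from uuid import UUID

from futurehouse_client import FutureHouseClient

client = FutureHouseClient(api_key="...")  # assumed authentication setup
dataset_id = UUID("00000000-0000-0000-0000-000000000000")  # placeholder ID

result = client.get_dataset(dataset_id)
print(result.dataset.name, result.dataset.created_at)

for entry in result.data_storage_entries:
    print(entry.id, entry.name, entry.is_collection)

# Individual entries can be removed once they are no longer needed.
client.delete_data_storage_entry(result.data_storage_entries[0].id)
```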
@@ -2628,7 +2922,15 @@ class DataStorageMethods:
         retry=retry_if_connection_error,
         before_sleep=before_sleep_log(logger, logging.WARNING),
     )
-    async def adelete_data_storage_entry(self, data_storage_entry_id: UUID):
+    async def adelete_data_storage_entry(self, data_storage_entry_id: UUID) -> None:
+        """Asynchronously delete a data storage entry.
+
+        Args:
+            data_storage_entry_id: UUID of the data storage entry to delete
+
+        Raises:
+            DataStorageError: If there's an error deleting the data storage entry
+        """
         try:
             await self.async_client.delete(
                 f"/v0.1/data-storage/data-entries/{data_storage_entry_id}"
@@ -2644,7 +2946,15 @@ class DataStorageMethods:
         retry=retry_if_connection_error,
         before_sleep=before_sleep_log(logger, logging.WARNING),
     )
-    def delete_data_storage_entry(self, data_storage_entry_id: UUID):
+    def delete_data_storage_entry(self, data_storage_entry_id: UUID) -> None:
+        """Delete a data storage entry.
+
+        Args:
+            data_storage_entry_id: UUID of the data storage entry to delete
+
+        Raises:
+            DataStorageError: If there's an error deleting the data storage entry
+        """
         try:
             self.client.delete(
                 f"/v0.1/data-storage/data-entries/{data_storage_entry_id}"
@@ -52,6 +52,11 @@ from futurehouse_client.models.app import (
     TaskResponseVerbose,
     TrajectoryQueryParams,
 )
+from futurehouse_client.models.job_event import (
+    JobEventCreateRequest,
+    JobEventCreateResponse,
+    JobEventUpdateRequest,
+)
 from futurehouse_client.models.rest import (
     DiscoveryResponse,
     ExecutionStatus,
@@ -160,6 +165,18 @@ class FileUploadError(RestClientError):
     """Raised when there's an error uploading a file."""
 
 
+class JobEventClientError(RestClientError):
+    """Raised when there's an error with job event operations."""
+
+
+class JobEventCreationError(JobEventClientError):
+    """Raised when there's an error creating a job event."""
+
+
+class JobEventUpdateError(JobEventClientError):
+    """Raised when there's an error updating a job event."""
+
+
 retry_if_connection_error = create_retry_if_connection_error(FileUploadError)
 
 DEFAULT_AGENT_TIMEOUT: int = 2400  # seconds
@@ -2609,6 +2626,176 @@ class RestClient(DataStorageMethods):
                 f"Error fetching discoveries for project: {e!r}"
             ) from e
 
+    @retry(
+        stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
+        wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
+        retry=retry_if_connection_error,
+        before_sleep=before_sleep_log(logger, logging.WARNING),
+    )
+    def create_job_event(
+        self, request: JobEventCreateRequest
+    ) -> JobEventCreateResponse:
+        """Create a new job event.
+
+        Args:
+            request: Job event creation request
+
+        Returns:
+            Job event creation response
+
+        Raises:
+            JobEventCreationError: If the API call fails
+        """
+        try:
+            response = self.client.post(
+                "/v0.1/job-events",
+                json=request.model_dump(exclude_none=True, mode="json"),
+            )
+            response.raise_for_status()
+            return JobEventCreateResponse(**response.json())
+        except HTTPStatusError as e:
+            if e.response.status_code == codes.BAD_REQUEST:
+                raise JobEventCreationError(
+                    f"Invalid job event creation request: {e.response.text}."
+                ) from e
+            if e.response.status_code == codes.NOT_FOUND:
+                raise JobEventCreationError(
+                    f"Execution not found for job event creation: {e.response.text}."
+                ) from e
+            raise JobEventCreationError(
+                f"Error creating job event: {e.response.status_code} - {e.response.text}."
+            ) from e
+        except Exception as e:
+            raise JobEventCreationError(
+                f"An unexpected error occurred during job event creation: {e!r}."
+            ) from e
+
+    @retry(
+        stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
+        wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
+        retry=retry_if_connection_error,
+        before_sleep=before_sleep_log(logger, logging.WARNING),
+    )
+    async def acreate_job_event(
+        self, request: JobEventCreateRequest
+    ) -> JobEventCreateResponse:
+        """Asynchronously create a new job event.
+
+        Args:
+            request: Job event creation request
+
+        Returns:
+            Job event creation response
+
+        Raises:
+            JobEventCreationError: If the API call fails
+        """
+        try:
+            response = await self.async_client.post(
+                "/v0.1/job-events",
+                json=request.model_dump(exclude_none=True, mode="json"),
+            )
+            response.raise_for_status()
+            return JobEventCreateResponse(**response.json())
+        except HTTPStatusError as e:
+            if e.response.status_code == codes.BAD_REQUEST:
+                raise JobEventCreationError(
+                    f"Invalid job event creation request: {e.response.text}."
+                ) from e
+            if e.response.status_code == codes.NOT_FOUND:
+                raise JobEventCreationError(
+                    f"Execution not found for job event creation: {e.response.text}."
+                ) from e
+            raise JobEventCreationError(
+                f"Error creating job event: {e.response.status_code} - {e.response.text}."
+            ) from e
+        except Exception as e:
+            raise JobEventCreationError(
+                f"An unexpected error occurred during job event creation: {e!r}."
+            ) from e
+
+    @retry(
+        stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
+        wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
+        retry=retry_if_connection_error,
+        before_sleep=before_sleep_log(logger, logging.WARNING),
+    )
+    def update_job_event(
+        self, job_event_id: UUID, request: JobEventUpdateRequest
+    ) -> None:
+        """Update an existing job event.
+
+        Args:
+            job_event_id: ID of the job event to update
+            request: Job event update request
+
+        Raises:
+            JobEventUpdateError: If the API call fails
+        """
+        try:
+            response = self.client.patch(
+                f"/v0.1/job-events/{job_event_id}",
+                json=request.model_dump(exclude_none=True, mode="json"),
+            )
+            response.raise_for_status()
+        except HTTPStatusError as e:
+            if e.response.status_code == codes.NOT_FOUND:
+                raise JobEventUpdateError(
+                    f"Job event with ID {job_event_id} not found."
+                ) from e
+            if e.response.status_code == codes.BAD_REQUEST:
+                raise JobEventUpdateError(
+                    f"Invalid job event update request: {e.response.text}."
+                ) from e
+            raise JobEventUpdateError(
+                f"Error updating job event: {e.response.status_code} - {e.response.text}."
+            ) from e
+        except Exception as e:
+            raise JobEventUpdateError(
+                f"An unexpected error occurred during job event update: {e!r}."
+            ) from e
+
+    @retry(
+        stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
+        wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
+        retry=retry_if_connection_error,
+        before_sleep=before_sleep_log(logger, logging.WARNING),
+    )
+    async def aupdate_job_event(
+        self, job_event_id: UUID, request: JobEventUpdateRequest
+    ) -> None:
+        """Asynchronously update an existing job event.
+
+        Args:
+            job_event_id: ID of the job event to update
+            request: Job event update request
+
+        Raises:
+            JobEventUpdateError: If the API call fails
+        """
+        try:
+            response = await self.async_client.patch(
+                f"/v0.1/job-events/{job_event_id}",
+                json=request.model_dump(exclude_none=True, mode="json"),
+            )
+            response.raise_for_status()
+        except HTTPStatusError as e:
+            if e.response.status_code == codes.NOT_FOUND:
+                raise JobEventUpdateError(
+                    f"Job event with ID {job_event_id} not found."
+                ) from e
+            if e.response.status_code == codes.BAD_REQUEST:
+                raise JobEventUpdateError(
+                    f"Invalid job event update request: {e.response.text}."
+                ) from e
+            raise JobEventUpdateError(
+                f"Error updating job event: {e.response.status_code} - {e.response.text}."
+            ) from e
+        except Exception as e:
+            raise JobEventUpdateError(
+                f"An unexpected error occurred during job event update: {e!r}."
+            ) from e
+
 
 
 def get_installed_packages() -> dict[str, str]:
     """Returns a dictionary of installed packages and their versions."""
@@ -13,13 +13,25 @@ from .app import (
     TaskResponse,
     TaskResponseVerbose,
 )
+from .job_event import (
+    CostComponent,
+    ExecutionType,
+    JobEventCreateRequest,
+    JobEventCreateResponse,
+    JobEventUpdateRequest,
+)
 from .rest import TrajectoryPatchRequest, WorldModel, WorldModelResponse
 
 __all__ = [
     "AuthType",
+    "CostComponent",
     "DockerContainerConfiguration",
+    "ExecutionType",
     "FramePath",
     "JobDeploymentConfig",
+    "JobEventCreateRequest",
+    "JobEventCreateResponse",
+    "JobEventUpdateRequest",
     "PQATaskResponse",
     "RuntimeConfig",
     "Stage",
@@ -3,10 +3,32 @@ from datetime import datetime
 from enum import StrEnum, auto
 from os import PathLike
 from pathlib import Path
-from typing import Any
+from typing import Annotated, Any
 from uuid import UUID
 
-from pydantic import BaseModel, Field, JsonValue
+from pydantic import (
+    BaseModel,
+    Field,
+    JsonValue,
+    PlainSerializer,
+    PlainValidator,
+    WithJsonSchema,
+)
+from sqlalchemy_utils import Ltree
+
+LtreeField = Annotated[
+    Ltree,
+    PlainValidator(Ltree),
+    PlainSerializer(lambda v: v.path),
+    WithJsonSchema({"type": "string", "examples": ["some.path"]}),
+]
+
+
+class DataStorageEntryStatus(StrEnum):
+    PENDING = auto()
+    ACTIVE = auto()
+    FAILED = auto()
+    DISABLED = auto()
 
 
 class DataStorageEntry(BaseModel):
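To illustrate the new LtreeField annotation above, here is a minimal, standalone sketch (demonstration model only, not part of the package) of how it validates raw strings into Ltree objects and serializes them back to a dotted path string:

```python
from typing import Annotated

from pydantic import BaseModel, PlainSerializer, PlainValidator, WithJsonSchema
from sqlalchemy_utils import Ltree

LtreeField = Annotated[
    Ltree,
    PlainValidator(Ltree),              # accept str (or Ltree) input
    PlainSerializer(lambda v: v.path),  # dump back to the dotted path string
    WithJsonSchema({"type": "string", "examples": ["some.path"]}),
]


class PathedEntry(BaseModel):
    """Demonstration-only model using the ltree-backed path field."""

    path: LtreeField


entry = PathedEntry(path="datasets.project_a.entry_1")
print(type(entry.path).__name__)  # Ltree
print(entry.model_dump())         # {'path': 'datasets.project_a.entry_1'}
```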
@@ -20,6 +42,9 @@ class DataStorageEntry(BaseModel):
     content: str | None = Field(
         default=None, description="Content of the data storage entry"
     )
+    status: DataStorageEntryStatus = Field(
+        description="Status of the data storage entry"
+    )
     embedding: list[float] | None = Field(
         default=None, description="Embedding vector for the content"
     )
@@ -157,6 +182,22 @@ class DataStorageRequestPayload(BaseModel):
     )
 
 
+class DatasetStorage(BaseModel):
+    """Pydantic model representing a DatasetStorage record."""
+
+    id: UUID
+    name: str
+    user_id: str
+    description: str | None = None
+    created_at: datetime
+    modified_at: datetime
+
+
+class GetDatasetAndEntriesResponse(BaseModel):
+    dataset: DatasetStorage
+    data_storage_entries: list[DataStorageEntry]
+
+
 class CreateDatasetPayload(BaseModel):
     """Payload for creating a dataset."""
 
@@ -0,0 +1,75 @@
+"""Job event models for cost and usage tracking."""
+
+from datetime import datetime
+from enum import StrEnum, auto
+from typing import Any
+from uuid import UUID
+
+from pydantic import BaseModel, Field
+
+
+class ExecutionType(StrEnum):
+    """Type of execution for job events."""
+
+    TRAJECTORY = auto()
+    SESSION = auto()
+
+
+class CostComponent(StrEnum):
+    """Cost component types for job events."""
+
+    LLM_USAGE = auto()
+    EXTERNAL_SERVICE = auto()
+    STEP = auto()
+
+
+class JobEventCreateRequest(BaseModel):
+    """Request model for creating a job event matching crow-service schema."""
+
+    execution_id: UUID = Field(description="UUID for trajectory_id or session_id")
+    execution_type: ExecutionType = Field(
+        description="Either 'TRAJECTORY' or 'SESSION'"
+    )
+    cost_component: CostComponent = Field(
+        description="Cost component: 'LLM_USAGE', 'EXTERNAL_SERVICE', or 'STEP'"
+    )
+    started_at: datetime = Field(description="Start time of the job event")
+    ended_at: datetime = Field(description="End time of the job event")
+    crow: str | None = Field(default=None, description="unique identifier for the crow")
+    amount_acu: float | None = Field(default=None, description="Cost amount in ACUs")
+    amount_usd: float | None = Field(default=None, description="Cost amount in USD")
+    rate: float | None = Field(default=None, description="Rate per token/call in USD")
+    input_token_count: int | None = Field(
+        default=None, description="Input token count for LLM calls"
+    )
+    completion_token_count: int | None = Field(
+        default=None, description="Completion token count for LLM calls"
+    )
+    metadata: dict[str, Any] | None = Field(default=None)
+
+
+class JobEventUpdateRequest(BaseModel):
+    """Request model for updating a job event matching crow-service schema."""
+
+    amount_acu: float | None = Field(default=None, description="Cost amount in ACUs")
+    amount_usd: float | None = Field(default=None, description="Cost amount in USD")
+    rate: float | None = Field(default=None, description="Rate per token/call in USD")
+    input_token_count: int | None = Field(
+        default=None, description="Input token count for LLM calls"
+    )
+    completion_token_count: int | None = Field(
+        default=None, description="Completion token count for LLM calls"
+    )
+    metadata: dict[str, Any] | None = Field(default=None)
+    started_at: datetime | None = Field(
+        default=None, description="Start time of the job event"
+    )
+    ended_at: datetime | None = Field(
+        default=None, description="End time of the job event"
+    )
+
+
+class JobEventCreateResponse(BaseModel):
+    """Response model for job event creation."""
+
+    id: UUID = Field(description="UUID of the created job event")
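Taken together with the create_job_event/update_job_event client methods added earlier in this diff, the models above can be used roughly as follows (a sketch; the API key setup and the placeholder execution ID are assumptions for illustration):

```python
from datetime import datetime, timezone
from uuid import UUID

from futurehouse_client import (
    CostComponent,
    ExecutionType,
    FutureHouseClient,
    JobEventCreateRequest,
    JobEventUpdateRequest,
)

client = FutureHouseClient(api_key="...")  # assumed authentication setup
trajectory_id = UUID("00000000-0000-0000-0000-000000000000")  # placeholder

# Record an LLM usage event against an existing trajectory.
request = JobEventCreateRequest(
    execution_id=trajectory_id,
    execution_type=ExecutionType.TRAJECTORY,
    cost_component=CostComponent.LLM_USAGE,
    started_at=datetime.now(timezone.utc),
    ended_at=datetime.now(timezone.utc),
    input_token_count=1200,
    completion_token_count=350,
)
created = client.create_job_event(request)

# Patch the cost figures later, once pricing is known.
client.update_job_event(
    created.id,
    JobEventUpdateRequest(amount_usd=0.0042, rate=2.5e-6),
)
```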
@@ -85,6 +85,11 @@ class SearchCriterion(BaseModel):
     value: str | list[str] | bool
 
 
+class FilterLogic(StrEnum):
+    AND = "AND"
+    OR = "OR"
+
+
 class WorldModelSearchPayload(BaseModel):
     """Payload for structured world model search."""
 
@@ -174,3 +179,4 @@ class DataStorageSearchPayload(BaseModel):
 
     criteria: list[SearchCriterion]
     size: int = 10
+    filter_logic: FilterLogic = FilterLogic.OR
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '0.4.5.dev49'
-__version_tuple__ = version_tuple = (0, 4, 5, 'dev49')
+__version__ = version = '0.4.5.dev160'
+__version_tuple__ = version_tuple = (0, 4, 5, 'dev160')
 
 __commit_id__ = commit_id = None
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: futurehouse-client
-Version: 0.4.5.dev49
+Version: 0.4.5.dev160
 Summary: A client for interacting with endpoints of the FutureHouse service.
 Author-email: FutureHouse technical staff <hello@futurehouse.org>
 License: Apache License
@@ -224,6 +224,7 @@ Requires-Dist: openai<1.100.0,>=1
 Requires-Dist: pydantic
 Requires-Dist: python-dotenv
 Requires-Dist: requests
+Requires-Dist: sqlalchemy-utils>=0.41.2
 Requires-Dist: tenacity
 Requires-Dist: tqdm>=4.62
 Provides-Extra: dev
@@ -0,0 +1,24 @@
+futurehouse_client/__init__.py,sha256=q5cpcuPkhTaueXsySsgWpH0F-2EsRxcdJfP91ze6khU,991
+futurehouse_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+futurehouse_client/version.py,sha256=6BA6oRbUzdnpPhiNHEHGYWEa8NjSzYLRAwSlZ3RVS6Y,721
+futurehouse_client/clients/__init__.py,sha256=-HXNj-XJ3LRO5XM6MZ709iPs29YpApss0Q2YYg1qMZw,280
+futurehouse_client/clients/data_storage_methods.py,sha256=f8ZsVicEtO50pRXoPzEB2GpiyqosNofyoW8vJeYvFnM,119266
+futurehouse_client/clients/job_client.py,sha256=b5gpzulZpxpv9R337r3UKItnMdtd6CGlI1sV3_VQJso,13985
+futurehouse_client/clients/rest_client.py,sha256=kLCR4dYduwX_16jaOZ26RGCOR2A_6nk2gpBKUqQ-KVI,110247
+futurehouse_client/models/__init__.py,sha256=N1MwDUYonsMN9NdaShsYcJspyL7H756MYj7VWFeD3fk,978
+futurehouse_client/models/app.py,sha256=UUg17I3zk6cH_7mrdojHGYvQfm_SeDkuUxsPlRyIYz0,31895
+futurehouse_client/models/client.py,sha256=n4HD0KStKLm6Ek9nL9ylP-bkK10yzAaD1uIDF83Qp_A,1828
+futurehouse_client/models/data_storage_methods.py,sha256=cpF2g4y_REECaz--WhaJeLqXA_3m3keRP5XOXiL8GOI,13811
+futurehouse_client/models/job_event.py,sha256=lMrx-lV7BQkKl419ErWZ6Q1EjurmhBFSns0z6zwGaVo,2766
+futurehouse_client/models/rest.py,sha256=SbeXZSPUCM0lQ_gVUPa64vKzMxuUVgqmJ5YThfDWs8g,4726
+futurehouse_client/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+futurehouse_client/utils/auth.py,sha256=tgWELjKfg8eWme_qdcRmc8TjQN9DVZuHHaVXZNHLchk,2960
+futurehouse_client/utils/general.py,sha256=PIkGLCSA3kUvc6mwR-prEB7YnMdKILOIm6cPowSZzzs,2532
+futurehouse_client/utils/module_utils.py,sha256=aFyd-X-pDARXz9GWpn8SSViUVYdSbuy9vSkrzcVIaGI,4955
+futurehouse_client/utils/monitoring.py,sha256=UjRlufe67kI3VxRHOd5fLtJmlCbVA2Wqwpd4uZhXkQM,8728
+futurehouse_client/utils/world_model_tools.py,sha256=v2krZGrco0ur2a_pcRMtnQL05SxlIoBXuJ5R1JkQNws,2921
+futurehouse_client-0.4.5.dev160.dist-info/licenses/LICENSE,sha256=oQ9ZHjUi-_6GfP3gs14FlPb0OlGwE1QCCKFGnJ4LD2I,11341
+futurehouse_client-0.4.5.dev160.dist-info/METADATA,sha256=ulzDMOtoPKkLAJxL6JPcqSzmuTqOmP5wxiB7l3bm_qM,27101
+futurehouse_client-0.4.5.dev160.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+futurehouse_client-0.4.5.dev160.dist-info/top_level.txt,sha256=TRuLUCt_qBnggdFHCX4O_BoCu1j2X43lKfIZC-ElwWY,19
+futurehouse_client-0.4.5.dev160.dist-info/RECORD,,
@@ -1,23 +0,0 @@
-futurehouse_client/__init__.py,sha256=PvFTkocA-hobsWoDEBEdrUgLIbuVbDs_0nvMdImJmHk,707
-futurehouse_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-futurehouse_client/version.py,sha256=0fTIuRsiUoxr-NFJaezh7gevHU-qV_oY3hDg8BKD-i8,719
-futurehouse_client/clients/__init__.py,sha256=-HXNj-XJ3LRO5XM6MZ709iPs29YpApss0Q2YYg1qMZw,280
-futurehouse_client/clients/data_storage_methods.py,sha256=yxtrZUFaVw6nwjAoRtOlyqr67M3tDJiFRjFtiFpJXHs,99811
-futurehouse_client/clients/job_client.py,sha256=b5gpzulZpxpv9R337r3UKItnMdtd6CGlI1sV3_VQJso,13985
-futurehouse_client/clients/rest_client.py,sha256=RdyFEipvADDCHyY5XFy565IoL9-N1myJjF0G8x2wlK8,103183
-futurehouse_client/models/__init__.py,sha256=0YlzKGymbY1g4cXxnUc0BUnthTkVBf12bCZlGUcMQqk,701
-futurehouse_client/models/app.py,sha256=UUg17I3zk6cH_7mrdojHGYvQfm_SeDkuUxsPlRyIYz0,31895
-futurehouse_client/models/client.py,sha256=n4HD0KStKLm6Ek9nL9ylP-bkK10yzAaD1uIDF83Qp_A,1828
-futurehouse_client/models/data_storage_methods.py,sha256=9L1C-BDaGJiWhr8Ps4P5kS4f0IuzXowCVeU2hYqore8,12932
-futurehouse_client/models/rest.py,sha256=Ze7Jwllkfsvu32ekqYqqBzLqv9LOmWIsjYlxamofM2s,4619
-futurehouse_client/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-futurehouse_client/utils/auth.py,sha256=tgWELjKfg8eWme_qdcRmc8TjQN9DVZuHHaVXZNHLchk,2960
-futurehouse_client/utils/general.py,sha256=PIkGLCSA3kUvc6mwR-prEB7YnMdKILOIm6cPowSZzzs,2532
-futurehouse_client/utils/module_utils.py,sha256=aFyd-X-pDARXz9GWpn8SSViUVYdSbuy9vSkrzcVIaGI,4955
-futurehouse_client/utils/monitoring.py,sha256=UjRlufe67kI3VxRHOd5fLtJmlCbVA2Wqwpd4uZhXkQM,8728
-futurehouse_client/utils/world_model_tools.py,sha256=v2krZGrco0ur2a_pcRMtnQL05SxlIoBXuJ5R1JkQNws,2921
-futurehouse_client-0.4.5.dev49.dist-info/licenses/LICENSE,sha256=oQ9ZHjUi-_6GfP3gs14FlPb0OlGwE1QCCKFGnJ4LD2I,11341
-futurehouse_client-0.4.5.dev49.dist-info/METADATA,sha256=XmAENG6NzYv8fxDYEiilIst3WyNPfS-HawsJjfC3TWU,27060
-futurehouse_client-0.4.5.dev49.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-futurehouse_client-0.4.5.dev49.dist-info/top_level.txt,sha256=TRuLUCt_qBnggdFHCX4O_BoCu1j2X43lKfIZC-ElwWY,19
-futurehouse_client-0.4.5.dev49.dist-info/RECORD,,