futurehouse-client 0.4.1.dev95__tar.gz → 0.4.2.dev274__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. {futurehouse_client-0.4.1.dev95/futurehouse_client.egg-info → futurehouse_client-0.4.2.dev274}/PKG-INFO +1 -1
  2. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client/clients/data_storage_methods.py +72 -16
  3. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client/clients/job_client.py +50 -0
  4. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client/clients/rest_client.py +20 -20
  5. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client/models/__init__.py +2 -1
  6. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client/models/app.py +4 -7
  7. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client/models/client.py +1 -5
  8. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client/models/data_storage_methods.py +8 -0
  9. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client/models/rest.py +9 -0
  10. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client/utils/world_model_tools.py +3 -2
  11. futurehouse_client-0.4.2.dev274/futurehouse_client/version.py +34 -0
  12. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274/futurehouse_client.egg-info}/PKG-INFO +1 -1
  13. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/tests/test_rest.py +5 -4
  14. futurehouse_client-0.4.1.dev95/futurehouse_client/version.py +0 -21
  15. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/LICENSE +0 -0
  16. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/README.md +0 -0
  17. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/docs/__init__.py +0 -0
  18. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/docs/client_notebook.ipynb +0 -0
  19. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client/__init__.py +0 -0
  20. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client/clients/__init__.py +0 -0
  21. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client/py.typed +0 -0
  22. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client/utils/__init__.py +0 -0
  23. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client/utils/auth.py +0 -0
  24. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client/utils/general.py +0 -0
  25. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client/utils/module_utils.py +0 -0
  26. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client/utils/monitoring.py +0 -0
  27. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client.egg-info/SOURCES.txt +0 -0
  28. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client.egg-info/dependency_links.txt +0 -0
  29. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client.egg-info/requires.txt +0 -0
  30. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/futurehouse_client.egg-info/top_level.txt +0 -0
  31. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/pyproject.toml +0 -0
  32. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/setup.cfg +0 -0
  33. {futurehouse_client-0.4.1.dev95 → futurehouse_client-0.4.2.dev274}/tests/test_client.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: futurehouse-client
- Version: 0.4.1.dev95
+ Version: 0.4.2.dev274
  Summary: A client for interacting with endpoints of the FutureHouse service.
  Author-email: FutureHouse technical staff <hello@futurehouse.org>
  License: Apache License
futurehouse_client/clients/data_storage_methods.py
@@ -469,18 +469,18 @@ class DataStorageMethods:
  """

  # needed for mypy `NoReturn`
- def _handle_http_errors(self, e: HTTPStatusError) -> NoReturn:
+ def _handle_http_errors(self, e: HTTPStatusError, operation: str) -> NoReturn:
  """Handle common HTTP errors for data storage operations."""
  if e.response.status_code == codes.FORBIDDEN:
  raise DataStorageCreationError(
- "Not authorized to create data storage entries"
+ f"Error {operation} data storage entry, not authorized"
  ) from e
  if e.response.status_code == codes.UNPROCESSABLE_ENTITY:
  raise DataStorageCreationError(
  f"Invalid request payload: {e.response.text}"
  ) from e
  raise DataStorageCreationError(
- f"Error creating data storage entry: {e.response.status_code} - {e.response.text}"
+ f"Error {operation} data storage entry: {e.response.status_code} - {e.response.text}"
  ) from e

  def _validate_file_path(self, file_path: str | Path) -> Path:
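The helper now takes an `operation` label ("creating", "retrieving", ...) so a single handler serves every call site with accurate messages. A minimal standalone sketch of the new pattern, assuming httpx-style exceptions; the names mirror the diff but the enclosing class is elided:

```python
# Sketch of the new error-handling pattern; the surrounding class is elided.
from typing import NoReturn

from httpx import HTTPStatusError, codes


class DataStorageCreationError(Exception):
    """Raised when a data storage operation fails."""


def handle_http_errors(e: HTTPStatusError, operation: str) -> NoReturn:
    # The operation label is threaded into every message, so one handler
    # can serve both the create and the retrieve code paths.
    if e.response.status_code == codes.FORBIDDEN:
        raise DataStorageCreationError(
            f"Error {operation} data storage entry, not authorized"
        ) from e
    if e.response.status_code == codes.UNPROCESSABLE_ENTITY:
        raise DataStorageCreationError(
            f"Invalid request payload: {e.response.text}"
        ) from e
    raise DataStorageCreationError(
        f"Error {operation} data storage entry: "
        f"{e.response.status_code} - {e.response.text}"
    ) from e
```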
@@ -761,6 +761,7 @@ class DataStorageMethods:
  path: str | None = None,
  ignore_patterns: list[str] | None = None,
  ignore_filename: str = ".gitignore",
+ project_id: UUID | None = None,
  ) -> DataStorageResponse:
  """Upload a directory as a single zip file collection.

@@ -771,6 +772,7 @@
  path: Optional GCS path for the zip file
  ignore_patterns: List of patterns to ignore when zipping
  ignore_filename: Name of ignore file to read from directory
+ project_id: ID of the project this data storage entry belongs to

  Returns:
  DataStorageResponse for the uploaded zip file
@@ -790,6 +792,7 @@
  description=description,
  path=zip_gcs_path,
  is_collection=True,
+ project_id=project_id,
  )

  logger.debug(
@@ -832,6 +835,7 @@
  path: str | None = None,
  ignore_patterns: list[str] | None = None,
  ignore_filename: str = ".gitignore",
+ project_id: UUID | None = None,
  ) -> DataStorageResponse:
  """Asynchronously upload a directory as a single zip file.

@@ -842,6 +846,7 @@
  path: Optional GCS path for the zip file
  ignore_patterns: List of patterns to ignore when zipping
  ignore_filename: Name of ignore file to read from directory
+ project_id: ID of the project this data storage entry belongs to

  Returns:
  DataStorageResponse for the uploaded zip file
@@ -861,6 +866,7 @@
  description=description,
  path=zip_gcs_path,
  is_collection=True,
+ project_id=project_id,
  )

  data_storage_response = await self._acreate_data_storage_entry(payload)
@@ -898,6 +904,7 @@
  file_path: Path,
  description: str | None,
  path: str | None = None,
+ project_id: UUID | None = None,
  ) -> DataStorageResponse:
  """Upload a single file."""
  file_size = file_path.stat().st_size
@@ -918,6 +925,7 @@
  content=text_content,
  path=path,
  is_collection=False,
+ project_id=project_id,
  )

  logger.debug("Sending file as text content")
@@ -934,6 +942,7 @@
  description=description,
  path=path,
  is_collection=False,
+ project_id=project_id,
  )

  logger.debug(
@@ -980,6 +989,7 @@
  description: str | None,
  path: str | None = None,
  dataset_id: UUID | None = None,
+ project_id: UUID | None = None,
  ) -> DataStorageResponse:
  """Asynchronously upload a single file."""
  file_size, text_payload = self._prepare_single_file_upload(
@@ -1000,6 +1010,7 @@
  path=path,
  is_collection=False,
  dataset_id=dataset_id,
+ project_id=project_id,
  )

  data_storage_response = await self._acreate_data_storage_entry(payload)
@@ -1036,6 +1047,7 @@
  path: str | None,
  parent_id: UUID | None,
  dataset_id: UUID | None = None,
+ project_id: UUID | None = None,
  ) -> DataStorageResponse:
  """Upload a single file with a parent ID (sync version)."""
  file_size, text_payload = self._prepare_single_file_upload(
@@ -1046,6 +1058,7 @@
  logger.debug("Sending file as text content with parent_id")
  text_payload.parent_id = parent_id
  text_payload.dataset_id = dataset_id
+ text_payload.project_id = project_id
  return self._create_data_storage_entry(text_payload)

  logger.debug(
@@ -1058,6 +1071,7 @@
  is_collection=False,
  parent_id=parent_id,
  dataset_id=dataset_id,
+ project_id=project_id,
  )
  data_storage_response = self._create_data_storage_entry(payload)

@@ -1092,6 +1106,7 @@
  dir_manifest: DirectoryManifest,
  current_parent_id: UUID,
  dataset_id: UUID | None = None,
+ project_id: UUID | None = None,
  ) -> DataStorageResponse | None:
  """Process a single file item for upload."""
  try:
@@ -1109,6 +1124,7 @@
  path=None,
  parent_id=current_parent_id,
  dataset_id=dataset_id,
+ project_id=project_id,
  )
  except Exception as e:
  logger.error(f"Failed to upload file {item}: {e}")
@@ -1126,6 +1142,7 @@
  base_dir: Path | None = None,
  dir_manifest: DirectoryManifest | None = None,
  dataset_id: UUID | None = None,
+ project_id: UUID | None = None,
  ) -> list[DataStorageResponse]:
  """Upload a directory with single dataset and individual file storage entries."""
  responses = []
@@ -1141,6 +1158,7 @@
  parent_id=None,
  dataset_id=None,
  is_collection=False,
+ project_id=project_id,
  )

  dir_response = self._create_data_storage_entry(payload)
@@ -1182,6 +1200,7 @@
  parent_id=current_parent_id,
  dataset_id=current_dataset_id,
  is_collection=False,
+ project_id=project_id,
  )
  subdir_response = self._create_data_storage_entry(subdir_payload)
  responses.append(subdir_response)
@@ -1197,6 +1216,7 @@
  base_dir=base_dir,
  dir_manifest=subdir_manifest,
  dataset_id=current_dataset_id,
+ project_id=project_id,
  )
  responses.extend(subdir_responses)
  elif item.is_file():
@@ -1247,6 +1267,7 @@
  path: str | None,
  parent_id: UUID | None,
  dataset_id: UUID | None = None,
+ project_id: UUID | None = None,
  ) -> DataStorageResponse:
  """Asynchronously upload a single file with a parent ID."""
  file_size, text_payload = self._prepare_single_file_upload(
@@ -1257,6 +1278,7 @@
  logger.debug("Sending file as text content with parent_id")
  text_payload.parent_id = parent_id
  text_payload.dataset_id = dataset_id
+ text_payload.project_id = project_id
  return await self._acreate_data_storage_entry(text_payload)

  logger.debug(
@@ -1269,6 +1291,7 @@
  is_collection=False,
  parent_id=parent_id,
  dataset_id=dataset_id,
+ project_id=project_id,
  )
  data_storage_response = await self._acreate_data_storage_entry(payload)

@@ -1302,6 +1325,7 @@
  dir_manifest: DirectoryManifest,
  current_parent_id: UUID,
  dataset_id: UUID | None = None,
+ project_id: UUID | None = None,
  ) -> DataStorageResponse | None:
  """Asynchronously process a single file item for upload."""
  try:
@@ -1319,6 +1343,7 @@
  path=None,
  parent_id=current_parent_id,
  dataset_id=dataset_id,
+ project_id=project_id,
  )
  except Exception as e:
  logger.error(f"Failed to upload file {item}: {e}")
@@ -1336,6 +1361,7 @@
  base_dir: Path | None = None,
  dir_manifest: DirectoryManifest | None = None,
  dataset_id: UUID | None = None,
+ project_id: UUID | None = None,
  ) -> list[DataStorageResponse]:
  """Upload a directory with single dataset and individual file storage entries (async)."""
  responses = []
@@ -1352,6 +1378,7 @@
  parent_id=None,
  dataset_id=None,
  is_collection=False,
+ project_id=project_id,
  )

  dir_response = await self._acreate_data_storage_entry(payload)
@@ -1392,6 +1419,7 @@
  parent_id=current_parent_id,
  dataset_id=current_dataset_id,
  is_collection=False,
+ project_id=project_id,
  )
  subdir_response = await self._acreate_data_storage_entry(subdir_payload)
  responses.append(subdir_response)
@@ -1407,6 +1435,7 @@
  base_dir=base_dir,
  dir_manifest=subdir_manifest,
  dataset_id=current_dataset_id,
+ project_id=project_id,
  )
  responses.extend(subdir_responses)
  elif item.is_file():
@@ -1443,6 +1472,7 @@
  content: str,
  description: str | None = None,
  path: str | None = None,
+ project_id: UUID | None = None,
  ) -> DataStorageResponse:
  """Store content as a string in the data storage system.

@@ -1451,6 +1481,7 @@
  content: Content to store as a string
  description: Optional description of the data storage entry
  path: Optional path for the data storage entry
+ project_id: ID of the project this data storage entry belongs to

  Returns:
  DataStorageResponse containing the created data storage entry and storage locations
@@ -1464,10 +1495,11 @@
  content=content,
  description=description,
  path=path,
+ project_id=project_id,
  )
  return self._create_data_storage_entry(payload)
  except HTTPStatusError as e:
- self._handle_http_errors(e)
+ self._handle_http_errors(e, "creating")
  except Exception as e:
  raise DataStorageCreationError(
  f"An unexpected error occurred: {e!r}"
@@ -1486,6 +1518,7 @@
  description: str | None = None,
  path: str | None = None,
  dataset_id: UUID | None = None,
+ project_id: UUID | None = None,
  ) -> DataStorageResponse:
  """Asynchronously store content as a string in the data storage system.

@@ -1495,6 +1528,7 @@
  description: Optional description of the data storage entry
  path: Optional path for the data storage entry
  dataset_id: Optional dataset ID to add entry to, or None to create new dataset
+ project_id: ID of the project this data storage entry belongs to

  Returns:
  DataStorageResponse containing the created data storage entry and storage locations
@@ -1509,10 +1543,11 @@
  description=description,
  path=path,
  dataset_id=dataset_id,
+ project_id=project_id,
  )
  return await self._acreate_data_storage_entry(payload)
  except HTTPStatusError as e:
- self._handle_http_errors(e)
+ self._handle_http_errors(e, "creating")
  except Exception as e:
  raise DataStorageCreationError(
  f"An unexpected error occurred: {e!r}"
@@ -1534,6 +1569,7 @@
  manifest_filename: str | None = None,
  ignore_patterns: list[str] | None = None,
  ignore_filename: str = ".gitignore",
+ project_id: UUID | None = None,
  ) -> DataStorageResponse:
  """Store file or directory content in the data storage system.

@@ -1552,6 +1588,7 @@
  manifest_filename: Name of manifest file
  ignore_patterns: List of patterns to ignore when zipping directories
  ignore_filename: Name of ignore file to read from directory (default: .gitignore)
+ project_id: ID of the project this data storage entry belongs to

  Returns:
  DataStorageResponse containing the final data storage entry
@@ -1564,7 +1601,13 @@
  try:
  if file_path.is_dir() and as_collection:
  return self._upload_data_directory(
- name, file_path, description, path, ignore_patterns, ignore_filename
+ name,
+ file_path,
+ description,
+ path,
+ ignore_patterns,
+ ignore_filename,
+ project_id,
  )
  if file_path.is_dir() and not as_collection:
  responses = self._upload_directory_hierarchically(
@@ -1574,16 +1617,19 @@
  manifest_filename=manifest_filename,
  ignore_patterns=ignore_patterns,
  ignore_filename=ignore_filename,
+ project_id=project_id,
  )
  if not responses:
  raise DataStorageCreationError(
  "No data storage entries were created"
  )
  return responses[0]
- return self._upload_data_single_file(name, file_path, description, path)
+ return self._upload_data_single_file(
+ name, file_path, description, path, project_id
+ )

  except HTTPStatusError as e:
- self._handle_http_errors(e)
+ self._handle_http_errors(e, "creating")
  except Exception as e:
  raise DataStorageCreationError(
  f"An unexpected error occurred during file upload: {e!r}"
@@ -1606,6 +1652,7 @@
  ignore_patterns: list[str] | None = None,
  ignore_filename: str = ".gitignore",
  dataset_id: UUID | None = None,
+ project_id: UUID | None = None,
  ) -> DataStorageResponse:
  """Asynchronously store file or directory content in the data storage system.

@@ -1620,6 +1667,7 @@
  ignore_patterns: List of patterns to ignore when zipping.
  ignore_filename: Name of ignore file to read (default: .gitignore).
  dataset_id: Optional dataset ID to add entry to, or None to create new dataset.
+ project_id: ID of the project this data storage entry belongs to

  Returns:
  The `DataStorageResponse` for the created entry. For hierarchical uploads,
@@ -1637,6 +1685,7 @@
  path,
  ignore_patterns,
  ignore_filename,
+ project_id,
  )
  responses = await self._aupload_directory_hierarchically(
  name=name,
@@ -1646,6 +1695,7 @@
  ignore_patterns=ignore_patterns,
  ignore_filename=ignore_filename,
  dataset_id=dataset_id,
+ project_id=project_id,
  )
  if not responses:
  raise DataStorageCreationError(
@@ -1653,11 +1703,11 @@
  )
  return responses[0]
  return await self._aupload_data_single_file(
- name, file_path, description, path, dataset_id
+ name, file_path, description, path, dataset_id, project_id
  )

  except HTTPStatusError as e:
- self._handle_http_errors(e)
+ self._handle_http_errors(e, "creating")
  except Exception as e:
  raise DataStorageCreationError(
  f"An unexpected error occurred during async file upload: {e!r}"
@@ -1675,6 +1725,7 @@
  existing_location: DataStorageLocationPayload,
  description: str | None = None,
  path: str | None = None,
+ project_id: UUID | None = None,
  ) -> DataStorageResponse:
  """Store content as a string in the data storage system.

@@ -1683,6 +1734,7 @@
  existing_location: Describes the existing data source location to register
  description: Optional description of the data storage entry
  path: Optional path for the data storage entry
+ project_id: ID of the project this data storage entry belongs to

  Returns:
  DataStorageResponse containing the created data storage entry and storage locations
@@ -1696,6 +1748,7 @@
  description=description,
  path=path,
  existing_location=existing_location,
+ project_id=project_id,
  )
  response = self.client.post(
  "/v0.1/data-storage", json=payload.model_dump(exclude_none=True)
@@ -1703,7 +1756,7 @@
  response.raise_for_status()
  return DataStorageResponse.model_validate(response.json())
  except HTTPStatusError as e:
- self._handle_http_errors(e)
+ self._handle_http_errors(e, "creating")
  except Exception as e:
  raise DataStorageCreationError(
  f"An unexpected error occurred: {e!r}"
@@ -1721,6 +1774,7 @@
  existing_location: DataStorageLocationPayload,
  description: str | None = None,
  path: str | None = None,
+ project_id: UUID | None = None,
  ) -> DataStorageResponse:
  """Store content as a string in the data storage system.

@@ -1729,6 +1783,7 @@
  existing_location: Describes the existing data source location to register
  description: Optional description of the data storage entry
  path: Optional path for the data storage entry
+ project_id: ID of the project this data storage entry belongs to

  Returns:
  DataStorageResponse containing the created data storage entry and storage locations
@@ -1742,6 +1797,7 @@
  description=description,
  path=path,
  existing_location=existing_location,
+ project_id=project_id,
  )
  response = await self.async_client.post(
  "/v0.1/data-storage", json=payload.model_dump(exclude_none=True)
@@ -1749,7 +1805,7 @@
  response.raise_for_status()
  return DataStorageResponse.model_validate(response.json())
  except HTTPStatusError as e:
- self._handle_http_errors(e)
+ self._handle_http_errors(e, "creating")
  except Exception as e:
  raise DataStorageCreationError(
  f"An unexpected error occurred: {e!r}"
@@ -1797,7 +1853,7 @@

  return self._download_from_gcs(result.signed_url)

- if storage_type == "raw_content":
+ if storage_type in {"raw_content", "pg_table"}:
  content = result.data_storage.content
  if content is None:
  logger.warning(
@@ -1809,7 +1865,7 @@
  raise DataStorageCreationError(f"Unsupported storage type: {storage_type}")

  except HTTPStatusError as e:
- self._handle_http_errors(e)
+ self._handle_http_errors(e, "retrieving")
  except Exception as e:
  raise DataStorageCreationError(
  f"An unexpected error occurred: {e!r}"
@@ -1857,7 +1913,7 @@

  return await self._adownload_from_gcs(result.signed_url)

- if storage_type == "raw_content":
+ if storage_type in {"raw_content", "pg_table"}:
  content = result.data_storage.content
  if content is None:
  logger.warning(
@@ -1869,7 +1925,7 @@
  raise DataStorageCreationError(f"Unsupported storage type: {storage_type}")

  except HTTPStatusError as e:
- self._handle_http_errors(e)
+ self._handle_http_errors(e, "retrieving")
  except Exception as e:
  raise DataStorageCreationError(
  f"An unexpected error occurred: {e!r}"
futurehouse_client/clients/job_client.py
@@ -19,6 +19,7 @@ from futurehouse_client.models.rest import (
  FinalEnvironmentRequest,
  StoreAgentStatePostRequest,
  StoreEnvironmentFrameRequest,
+ TrajectoryPatchRequest,
  )
  from futurehouse_client.utils.monitoring import (
  external_trace,
@@ -318,3 +319,52 @@ class JobClient:
  f"Unexpected error storing environment frame for state {state_identifier}",
  )
  raise
+
+ async def patch_trajectory(
+ self,
+ public: bool | None = None,
+ shared_with: list[int] | None = None,
+ notification_enabled: bool | None = None,
+ notification_type: str | None = None,
+ min_estimated_time: float | None = None,
+ max_estimated_time: float | None = None,
+ ) -> None:
+ data = TrajectoryPatchRequest(
+ public=public,
+ shared_with=shared_with,
+ notification_enabled=notification_enabled,
+ notification_type=notification_type,
+ min_estimated_time=min_estimated_time,
+ max_estimated_time=max_estimated_time,
+ )
+ try:
+ async with httpx.AsyncClient(timeout=self.REQUEST_TIMEOUT) as client:
+ url = f"{self.base_uri}/v0.1/trajectories/{self.trajectory_id}"
+ headers = {
+ "Authorization": f"Bearer {self.oauth_jwt}",
+ "x-trajectory-id": self.trajectory_id,
+ }
+ response = await client.patch(
+ url=url,
+ json=data.model_dump(mode="json", exclude_none=True),
+ headers=headers,
+ )
+ response.raise_for_status()
+ logger.debug("Trajectory updated successfully")
+ except httpx.HTTPStatusError as e:
+ logger.exception(
+ "HTTP error while patching trajectory. "
+ f"Status code: {e.response.status_code}, "
+ f"Response: {e.response.text}",
+ )
+ except httpx.TimeoutException:
+ logger.exception(
+ f"Timeout while patching trajectory after {self.REQUEST_TIMEOUT}s",
+ )
+ raise
+ except httpx.NetworkError:
+ logger.exception("Network error while patching trajectory")
+ raise
+ except Exception:
+ logger.exception("Unexpected error while patching trajectory")
+ raise
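The new `JobClient.patch_trajectory` sends a PATCH to `/v0.1/trajectories/{trajectory_id}` with only the fields you pass, since the body is serialized with `exclude_none=True`. A usage sketch, assuming an already constructed `JobClient` (its construction arguments are not shown in this diff):

```python
# Usage sketch for JobClient.patch_trajectory; job_client is assumed built.
import asyncio


async def main(job_client) -> None:
    # None-valued fields are omitted from the PATCH body entirely.
    await job_client.patch_trajectory(
        public=True,
        notification_enabled=True,
        notification_type="email",  # assumed value; allowed types not shown
    )


# asyncio.run(main(job_client))  # with a real JobClient instance
```

Note that on `HTTPStatusError` the method logs and swallows the error, while timeouts, network errors, and unexpected exceptions are re-raised.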
futurehouse_client/clients/rest_client.py
@@ -1630,16 +1630,17 @@ class RestClient(DataStorageMethods):
  A list of world model names.
  """
  try:
+ # Use the consolidated endpoint with search parameters
  response = self.client.get(
- "/v0.1/world-models/search/",
+ "/v0.1/world-models",
  params={
- "query": query,
+ "q": query,
  "size": size,
- "total_search_size": total_search_size,
  "search_all_versions": search_all_versions,
  },
  )
  response.raise_for_status()
+ # The new endpoint returns a list of models directly
  return response.json()
  except HTTPStatusError as e:
  raise WorldModelFetchError(
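World model search moves from the dedicated `/v0.1/world-models/search/` route to the consolidated `/v0.1/world-models` endpoint: the query parameter is renamed from `query` to `q`, `total_search_size` is dropped, and the response is the list of models itself. A raw-request sketch, assuming an httpx client already pointed at the service base URL with auth configured:

```python
# Raw-request sketch of the consolidated world-models endpoint.
import httpx


def search_world_models_raw(client: httpx.Client, query: str, size: int = 10):
    response = client.get(
        "/v0.1/world-models",
        params={
            "q": query,                    # renamed from "query"
            "size": size,
            "search_all_versions": False,  # total_search_size was dropped
        },
    )
    response.raise_for_status()
    return response.json()  # the endpoint returns the list directly
```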
@@ -1754,22 +1755,19 @@
  wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
  retry=retry_if_connection_error,
  )
- def get_project_by_name(self, name: str) -> UUID:
+ def get_project_by_name(self, name: str, limit: int = 2) -> UUID | list[UUID]:
  """Get a project UUID by name.

  Args:
  name: The name of the project to find
+ limit: Maximum number of projects to return

  Returns:
- UUID of the project as a string
-
- Raises:
- ProjectError: If no project is found, multiple projects are found, or there's an error
+ UUID of the project as a string or a list of UUIDs if multiple projects are found
  """
  try:
- # Get projects filtered by name (backend now filters by name and owner)
  response = self.client.get(
- "/v0.1/projects", params={"limit": 2, "name": name}
+ "/v0.1/projects", params={"limit": limit, "name": name}
  )
  response.raise_for_status()
  projects = response.json()
@@ -1782,32 +1780,33 @@
  if len(projects) == 0:
  raise ProjectError(f"No project found with name '{name}'")
  if len(projects) > 1:
- raise ProjectError(
+ logger.warning(
  f"Multiple projects found with name '{name}'. Found {len(projects)} projects."
  )

- return UUID(projects[0]["id"])
+ ids = [UUID(project["id"]) for project in projects]
+ return ids[0] if len(ids) == 1 else ids

  @retry(
  stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
  wait=wait_exponential(multiplier=RETRY_MULTIPLIER, max=MAX_RETRY_WAIT),
  retry=retry_if_connection_error,
  )
- async def aget_project_by_name(self, name: str) -> UUID:
+ async def aget_project_by_name(
+ self, name: str, limit: int = 2
+ ) -> UUID | list[UUID]:
  """Asynchronously get a project UUID by name.

  Args:
  name: The name of the project to find
+ limit: Maximum number of projects to return

  Returns:
- UUID of the project as a string
-
- Raises:
- ProjectError: If no project is found, multiple projects are found, or there's an error
+ UUID of the project as a string or a list of UUIDs if multiple projects are found
  """
  try:
  response = await self.async_client.get(
- "/v0.1/projects", params={"limit": 2, "name": name}
+ "/v0.1/projects", params={"limit": limit, "name": name}
  )
  response.raise_for_status()
  projects = response.json()
@@ -1816,11 +1815,12 @@
  if len(projects) == 0:
  raise ProjectError(f"No project found with name '{name}'")
  if len(projects) > 1:
- raise ProjectError(
+ logger.warning(
  f"Multiple projects found with name '{name}'. Found {len(projects)} projects."
  )

- return UUID(projects[0]["id"])
+ ids = [UUID(project["id"]) for project in projects]
+ return ids[0] if len(ids) == 1 else ids

  @retry(
  stop=stop_after_attempt(MAX_RETRY_ATTEMPTS),
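This is a behavioral (and mildly breaking) change: duplicate project names no longer raise `ProjectError` but log a warning and return a list, so the return type widens to `UUID | list[UUID]`. Callers should normalize the result. A caller-side sketch, with `client` an already constructed `RestClient` (construction elided):

```python
# Caller-side sketch: normalize the widened return type of
# RestClient.get_project_by_name after this change.
from uuid import UUID


def resolve_project_ids(client, name: str) -> list[UUID]:
    result = client.get_project_by_name(name)
    # A single match comes back as a bare UUID, duplicates as a list.
    return result if isinstance(result, list) else [result]
```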
futurehouse_client/models/__init__.py
@@ -13,7 +13,7 @@ from .app import (
  TaskResponse,
  TaskResponseVerbose,
  )
- from .rest import WorldModel, WorldModelResponse
+ from .rest import TrajectoryPatchRequest, WorldModel, WorldModelResponse

  __all__ = [
  "AuthType",
@@ -29,6 +29,7 @@ __all__ = [
  "TaskRequest",
  "TaskResponse",
  "TaskResponseVerbose",
+ "TrajectoryPatchRequest",
  "WorldModel",
  "WorldModelResponse",
  ]
futurehouse_client/models/app.py
@@ -27,10 +27,7 @@ if TYPE_CHECKING:
  MAX_CROW_JOB_RUN_TIMEOUT = 60 * 60 * 24  # 24 hours in sec
  MIN_CROW_JOB_RUN_TIMEOUT = 0  # sec

-
- class PythonVersion(StrEnum):
- V3_11 = "3.11"
- V3_12 = "3.12"
+ DEFAULT_PYTHON_VERSION_USED_FOR_JOB_BUILDS = "3.13"


  class AuthType(StrEnum):
@@ -420,9 +417,9 @@ class JobDeploymentConfig(BaseModel):
  description="The configuration for the cloud run container.",
  )

- python_version: PythonVersion = Field(
- default=PythonVersion.V3_12,
- description="The python version your docker image should build with.",
+ python_version: str = Field(
+ default=DEFAULT_PYTHON_VERSION_USED_FOR_JOB_BUILDS,
+ description="The python version your docker image should build with (e.g., '3.11', '3.12', '3.13').",
  )

  agent: Agent | AgentConfig | str = Field(
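The `PythonVersion` enum is removed: `python_version` is now a plain string defaulting to "3.13", so new interpreter versions no longer require a client release. A sketch illustrating the loosened field, assuming pydantic v2 semantics; the required fields of the real `JobDeploymentConfig` are elided here:

```python
# Stand-in sketch of the loosened python_version field.
from pydantic import BaseModel, Field

DEFAULT_PYTHON_VERSION_USED_FOR_JOB_BUILDS = "3.13"


class BuildConfig(BaseModel):  # stand-in for JobDeploymentConfig
    python_version: str = Field(
        default=DEFAULT_PYTHON_VERSION_USED_FOR_JOB_BUILDS,
        description="The python version your docker image should build with.",
    )


print(BuildConfig().python_version)                       # "3.13"
print(BuildConfig(python_version="3.11").python_version)  # any string is valid
```

Note the trade-off: the enum rejected unsupported versions at validation time; a free-form string defers that check to the build.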
futurehouse_client/models/client.py
@@ -27,17 +27,13 @@ class InitialState(BaseState):

  class ASVState(BaseState, Generic[T]):
  action: OpResult[T] = Field()
- next_state: Any = Field()
+ next_agent_state: Any = Field()
  value: float = Field()

  @field_serializer("action")
  def serialize_action(self, action: OpResult[T]) -> dict:
  return action.to_dict()

- @field_serializer("next_state")
- def serialize_next_state(self, state: Any) -> str:
- return str(state)
-

  class EnvResetState(BaseState):
  observations: list[Message] = Field()
futurehouse_client/models/data_storage_methods.py
@@ -34,6 +34,10 @@ class DataStorageEntry(BaseModel):
  default=None,
  description="ID of the parent entry if this is a sub-entry for hierarchical storage",
  )
+ project_id: UUID | None = Field(
+ default=None,
+ description="ID of the project this data storage entry belongs to",
+ )
  dataset_id: UUID | None = Field(
  default=None,
  description="ID of the dataset this entry belongs to",
@@ -131,6 +135,10 @@ class DataStorageRequestPayload(BaseModel):
  parent_id: UUID | None = Field(
  default=None, description="ID of the parent entry for hierarchical storage"
  )
+ project_id: UUID | None = Field(
+ default=None,
+ description="ID of the project this data storage entry belongs to",
+ )
  dataset_id: UUID | None = Field(
  default=None,
  description="ID of existing dataset to add entry to, or None to create new dataset",
futurehouse_client/models/rest.py
@@ -23,6 +23,15 @@ class StoreEnvironmentFrameRequest(BaseModel):
  trajectory_timestep: int


+ class TrajectoryPatchRequest(BaseModel):
+ public: bool | None = None
+ shared_with: list[int] | None = None
+ notification_enabled: bool | None = None
+ notification_type: str | None = None
+ min_estimated_time: float | None = None
+ max_estimated_time: float | None = None
+
+
  class ExecutionStatus(StrEnum):
  QUEUED = auto()
  IN_PROGRESS = "in progress"
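Every field on `TrajectoryPatchRequest` is optional, which makes it a true partial-update model: combined with `model_dump(mode="json", exclude_none=True)` (as `JobClient.patch_trajectory` uses it), only explicitly set fields reach the server. A serialization sketch mirroring the new model:

```python
# Serialization sketch matching how JobClient sends this model.
from pydantic import BaseModel


class TrajectoryPatchRequestSketch(BaseModel):  # mirrors the new model
    public: bool | None = None
    shared_with: list[int] | None = None
    notification_enabled: bool | None = None
    notification_type: str | None = None
    min_estimated_time: float | None = None
    max_estimated_time: float | None = None


req = TrajectoryPatchRequestSketch(public=True, shared_with=[42])
print(req.model_dump(mode="json", exclude_none=True))
# -> {'public': True, 'shared_with': [42]}
```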
futurehouse_client/utils/world_model_tools.py
@@ -46,16 +46,17 @@ class WorldModelTools:
  return WorldModelTools._get_client().create_world_model(world_model)

  @staticmethod
- def search_world_models(query: str) -> list[str]:
+ def search_world_models(query: str, size: int = 10) -> list[str]:
  """Search for world models using a text query.

  Args:
  query: The search query string to match against world model content.
+ size: The number of results to return (default: 10).

  Returns:
  list[str]: A list of world model IDs that match the search query.
  """
- return WorldModelTools._get_client().search_world_models(query, size=1)
+ return WorldModelTools._get_client().search_world_models(query, size=size)

  @staticmethod


  create_world_model_tool = Tool.from_function(WorldModelTools.create_world_model)
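The tool wrapper previously pinned searches to a single result (`size=1`); it now exposes `size` with a default of 10. A usage sketch; note the import path matches this file, but an authenticated client must be configured for `WorldModelTools._get_client()` to succeed at runtime:

```python
# Usage sketch of the widened tool signature; requires a configured client.
from futurehouse_client.utils.world_model_tools import WorldModelTools

# Previously pinned to one result; now tunable, default 10.
top_ids = WorldModelTools.search_world_models("protein folding", size=5)
print(top_ids)  # list[str] of matching world model IDs
```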
futurehouse_client/version.py (added)
@@ -0,0 +1,34 @@
+ # file generated by setuptools-scm
+ # don't change, don't track in version control
+
+ __all__ = [
+ "__version__",
+ "__version_tuple__",
+ "version",
+ "version_tuple",
+ "__commit_id__",
+ "commit_id",
+ ]
+
+ TYPE_CHECKING = False
+ if TYPE_CHECKING:
+ from typing import Tuple
+ from typing import Union
+
+ VERSION_TUPLE = Tuple[Union[int, str], ...]
+ COMMIT_ID = Union[str, None]
+ else:
+ VERSION_TUPLE = object
+ COMMIT_ID = object
+
+ version: str
+ __version__: str
+ __version_tuple__: VERSION_TUPLE
+ version_tuple: VERSION_TUPLE
+ commit_id: COMMIT_ID
+ __commit_id__: COMMIT_ID
+
+ __version__ = version = '0.4.2.dev274'
+ __version_tuple__ = version_tuple = (0, 4, 2, 'dev274')
+
+ __commit_id__ = commit_id = 'g5ebce206a'
futurehouse_client.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: futurehouse-client
- Version: 0.4.1.dev95
+ Version: 0.4.2.dev274
  Summary: A client for interacting with endpoints of the FutureHouse service.
  Author-email: FutureHouse technical staff <hello@futurehouse.org>
  License: Apache License
tests/test_rest.py
@@ -820,13 +820,14 @@ class TestProjectOperations:
  mock_response = MagicMock()
  mock_response.raise_for_status.return_value = None
  mock_response.json.return_value = [
- {"id": "uuid1", "name": "test"},
- {"id": "uuid2", "name": "test"},
+ {"id": str(uuid4()), "name": "test"},
+ {"id": str(uuid4()), "name": "test"},
  ]
  mock_get.return_value = mock_response

- with pytest.raises(ProjectError, match="Multiple projects found"):
- admin_client.get_project_by_name("test")
+ projects = admin_client.get_project_by_name("test")
+ assert isinstance(projects, list)
+ assert len(projects) == 2

  def test_add_task_to_project_success(
  self, admin_client: RestClient, test_project_name: str, task_req: TaskRequest
futurehouse_client/version.py (removed, old location)
@@ -1,21 +0,0 @@
- # file generated by setuptools-scm
- # don't change, don't track in version control
-
- __all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
-
- TYPE_CHECKING = False
- if TYPE_CHECKING:
- from typing import Tuple
- from typing import Union
-
- VERSION_TUPLE = Tuple[Union[int, str], ...]
- else:
- VERSION_TUPLE = object
-
- version: str
- __version__: str
- __version_tuple__: VERSION_TUPLE
- version_tuple: VERSION_TUPLE
-
- __version__ = version = '0.4.1.dev95'
- __version_tuple__ = version_tuple = (0, 4, 1, 'dev95')