llama-cloud 0.1.36__py3-none-any.whl → 0.1.38__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of llama-cloud might be problematic.

Files changed (47)
  1. llama_cloud/__init__.py +20 -4
  2. llama_cloud/client.py +3 -0
  3. llama_cloud/resources/__init__.py +3 -3
  4. llama_cloud/resources/admin/client.py +57 -0
  5. llama_cloud/resources/alpha/__init__.py +2 -0
  6. llama_cloud/resources/alpha/client.py +118 -0
  7. llama_cloud/resources/beta/client.py +576 -20
  8. llama_cloud/resources/chat_apps/client.py +32 -8
  9. llama_cloud/resources/classifier/client.py +139 -11
  10. llama_cloud/resources/data_sinks/client.py +32 -8
  11. llama_cloud/resources/data_sources/client.py +32 -8
  12. llama_cloud/resources/data_sources/types/data_source_update_component.py +2 -0
  13. llama_cloud/resources/embedding_model_configs/client.py +48 -12
  14. llama_cloud/resources/files/__init__.py +2 -2
  15. llama_cloud/resources/files/client.py +189 -113
  16. llama_cloud/resources/files/types/__init__.py +1 -3
  17. llama_cloud/resources/jobs/client.py +12 -6
  18. llama_cloud/resources/llama_extract/client.py +138 -32
  19. llama_cloud/resources/organizations/client.py +18 -4
  20. llama_cloud/resources/parsing/client.py +16 -4
  21. llama_cloud/resources/pipelines/client.py +32 -8
  22. llama_cloud/resources/projects/client.py +78 -18
  23. llama_cloud/resources/reports/client.py +126 -30
  24. llama_cloud/resources/retrievers/client.py +48 -12
  25. llama_cloud/types/__init__.py +20 -2
  26. llama_cloud/types/agent_deployment_summary.py +1 -0
  27. llama_cloud/types/classify_job.py +2 -0
  28. llama_cloud/types/cloud_jira_data_source_v_2.py +52 -0
  29. llama_cloud/types/cloud_jira_data_source_v_2_api_version.py +21 -0
  30. llama_cloud/types/configurable_data_source_names.py +4 -0
  31. llama_cloud/types/data_source_component.py +2 -0
  32. llama_cloud/types/data_source_create_component.py +2 -0
  33. llama_cloud/types/data_source_reader_version_metadata_reader_version.py +9 -1
  34. llama_cloud/types/file_create.py +41 -0
  35. llama_cloud/types/{classify_job_with_status.py → file_filter.py} +8 -15
  36. llama_cloud/types/file_query_response.py +38 -0
  37. llama_cloud/types/llama_extract_mode_availability.py +37 -0
  38. llama_cloud/types/llama_extract_mode_availability_status.py +17 -0
  39. llama_cloud/types/paginated_response_classify_job.py +34 -0
  40. llama_cloud/types/pipeline_data_source_component.py +2 -0
  41. llama_cloud/types/usage_response_active_alerts_item.py +4 -0
  42. {llama_cloud-0.1.36.dist-info → llama_cloud-0.1.38.dist-info}/METADATA +2 -1
  43. {llama_cloud-0.1.36.dist-info → llama_cloud-0.1.38.dist-info}/RECORD +47 -38
  44. {llama_cloud-0.1.36.dist-info → llama_cloud-0.1.38.dist-info}/WHEEL +1 -1
  45. /llama_cloud/{resources/files/types → types}/file_create_permission_info_value.py +0 -0
  46. /llama_cloud/{resources/files/types → types}/file_create_resource_info_value.py +0 -0
  47. {llama_cloud-0.1.36.dist-info → llama_cloud-0.1.38.dist-info}/LICENSE +0 -0
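Every hunk that follows shares one theme: an optional project_id is threaded through the request and, where supplied, forwarded as a Project-Id HTTP header in addition to the existing query or path parameter. A minimal sketch of that header behaviour, assuming only that remove_none_from_dict drops None-valued keys (the helper itself is not shown in this diff):

from typing import Any, Dict, Optional

def remove_none_from_dict(d: Dict[str, Any]) -> Dict[str, Any]:
    # Assumed behaviour of the SDK helper: keep only non-None values.
    return {key: value for key, value in d.items() if value is not None}

def build_headers(base_headers: Dict[str, str], project_id: Optional[str]) -> Dict[str, str]:
    # Mirrors the repeated change: headers=remove_none_from_dict({**get_headers(), "Project-Id": project_id})
    return remove_none_from_dict({**base_headers, "Project-Id": project_id})

print(build_headers({"Authorization": "Bearer <token>"}, "proj_123"))  # Project-Id is included
print(build_headers({"Authorization": "Bearer <token>"}, None))        # Project-Id is omitted

The first hunks are from llama_cloud/resources/organizations/client.py.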
@@ -657,7 +657,12 @@ class OrganizationsClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  def add_user_to_project(
- self, organization_id: typing.Optional[str], user_id: str, *, project_id: typing.Optional[str] = None
+ self,
+ organization_id: typing.Optional[str],
+ user_id: str,
+ *,
+ project_id: typing.Optional[str] = None,
  ) -> typing.Any:
  """
  Add a user to a project.
@@ -685,7 +692,7 @@ class OrganizationsClient:
  f"api/v1/organizations/{organization_id}/users/{user_id}/projects",
  ),
  params=remove_none_from_dict({"project_id": project_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1365,7 +1372,12 @@ class AsyncOrganizationsClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def add_user_to_project(
- self, organization_id: typing.Optional[str], user_id: str, *, project_id: typing.Optional[str] = None
+ self,
+ organization_id: typing.Optional[str],
+ user_id: str,
+ *,
+ project_id: typing.Optional[str] = None,
  ) -> typing.Any:
  """
  Add a user to a project.
@@ -1393,7 +1407,7 @@ class AsyncOrganizationsClient:
  f"api/v1/organizations/{organization_id}/users/{user_id}/projects",
  ),
  params=remove_none_from_dict({"project_id": project_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
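In OrganizationsClient and AsyncOrganizationsClient, add_user_to_project keeps its existing parameters; the signature is merely reflowed and the project_id value is now also sent as the Project-Id header. A hypothetical call, assuming the client is constructed with an API token and exposes the resource as client.organizations (neither detail is shown in this diff):

from llama_cloud.client import LlamaCloud

client = LlamaCloud(token="YOUR_API_KEY")  # assumed constructor
client.organizations.add_user_to_project(
    "org_...",              # organization_id
    "user_...",             # user_id
    project_id="proj_...",  # sent as the project_id query parameter and as the Project-Id header
)

The next hunks are from llama_cloud/resources/parsing/client.py, where the screenshot and upload endpoints gain an optional project_id parameter and the same header treatment.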
@@ -121,6 +121,7 @@ class ParsingClient:
  webhook_configurations: str,
  job_timeout_in_seconds: float,
  job_timeout_extra_time_per_page_in_seconds: float,
+ project_id: typing.Optional[str] = None,
  ) -> ParsingJob:
  """
  Parameters:
@@ -157,6 +158,8 @@ class ParsingClient:
  - job_timeout_in_seconds: float.

  - job_timeout_extra_time_per_page_in_seconds: float.
+
+ - project_id: typing.Optional[str].
  """
  _request: typing.Dict[str, typing.Any] = {
  "do_not_cache": do_not_cache,
@@ -182,7 +185,7 @@ class ParsingClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/parsing/screenshot"),
  params=remove_none_from_dict({"organization_id": organization_id, "project_id": project_id}),
  json=jsonable_encoder(_request),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -296,6 +299,7 @@ class ParsingClient:
  page_header_suffix: str,
  page_footer_prefix: str,
  page_footer_suffix: str,
+ project_id: typing.Optional[str] = None,
  ) -> ParsingJob:
  """
  Parameters:
@@ -494,6 +498,8 @@ class ParsingClient:
  - page_footer_prefix: str.

  - page_footer_suffix: str.
+
+ - project_id: typing.Optional[str].
  """
  _request: typing.Dict[str, typing.Any] = {
  "adaptive_long_table": adaptive_long_table,
@@ -602,7 +608,7 @@ class ParsingClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/parsing/upload"),
  params=remove_none_from_dict({"organization_id": organization_id, "project_id": project_id}),
  json=jsonable_encoder(_request),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1293,6 +1299,7 @@ class AsyncParsingClient:
  webhook_configurations: str,
  job_timeout_in_seconds: float,
  job_timeout_extra_time_per_page_in_seconds: float,
+ project_id: typing.Optional[str] = None,
  ) -> ParsingJob:
  """
  Parameters:
@@ -1329,6 +1336,8 @@ class AsyncParsingClient:
  - job_timeout_in_seconds: float.

  - job_timeout_extra_time_per_page_in_seconds: float.
+
+ - project_id: typing.Optional[str].
  """
  _request: typing.Dict[str, typing.Any] = {
  "do_not_cache": do_not_cache,
@@ -1354,7 +1363,7 @@ class AsyncParsingClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/parsing/screenshot"),
  params=remove_none_from_dict({"organization_id": organization_id, "project_id": project_id}),
  json=jsonable_encoder(_request),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1468,6 +1477,7 @@ class AsyncParsingClient:
  page_header_suffix: str,
  page_footer_prefix: str,
  page_footer_suffix: str,
+ project_id: typing.Optional[str] = None,
  ) -> ParsingJob:
  """
  Parameters:
@@ -1666,6 +1676,8 @@ class AsyncParsingClient:
  - page_footer_prefix: str.

  - page_footer_suffix: str.
+
+ - project_id: typing.Optional[str].
  """
  _request: typing.Dict[str, typing.Any] = {
  "adaptive_long_table": adaptive_long_table,
@@ -1774,7 +1786,7 @@ class AsyncParsingClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/parsing/upload"),
  params=remove_none_from_dict({"organization_id": organization_id, "project_id": project_id}),
  json=jsonable_encoder(_request),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
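The hunks below are from llama_cloud/resources/pipelines/client.py. Pipeline search and retrieval gain an optional project_id parameter, and pipeline create, upsert, and retrieve requests now carry the Project-Id header. A hypothetical search, assuming the resource is exposed as client.pipelines (the constructor and attribute names are assumptions based on the docstring imports shown in this diff):

from llama_cloud.client import LlamaCloud

client = LlamaCloud(token="YOUR_API_KEY")  # assumed constructor
pipelines = client.pipelines.search_pipelines(
    pipeline_name="my-pipeline",
    project_id="proj_...",  # new optional parameter, also sent as the Project-Id header
)
for pipeline in pipelines:
    print(pipeline)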
@@ -67,6 +67,7 @@ class PipelinesClient:
  pipeline_name: typing.Optional[str] = None,
  pipeline_type: typing.Optional[PipelineType] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.List[Pipeline]:
  """
  Search for pipelines by various parameters.
@@ -81,6 +82,8 @@ class PipelinesClient:
  - pipeline_type: typing.Optional[PipelineType].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud import PipelineType
  from llama_cloud.client import LlamaCloud
@@ -104,7 +107,7 @@ class PipelinesClient:
  "organization_id": organization_id,
  }
  ),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -133,13 +137,15 @@ class PipelinesClient:
  - organization_id: typing.Optional[str].

  - request: PipelineCreate.
+
+ - project_id: typing.Optional[str].
  """
  _response = self._client_wrapper.httpx_client.request(
  "POST",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/pipelines"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder(request),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -169,13 +176,15 @@ class PipelinesClient:
  - organization_id: typing.Optional[str].

  - request: PipelineCreate.
+
+ - project_id: typing.Optional[str].
  """
  _response = self._client_wrapper.httpx_client.request(
  "PUT",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/pipelines"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder(request),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1086,6 +1095,7 @@ class PipelinesClient:
  retrieve_page_figure_nodes: typing.Optional[bool] = OMIT,
  query: str,
  class_name: typing.Optional[str] = OMIT,
+ project_id: typing.Optional[str] = None,
  ) -> RetrieveResults:
  """
  Get retrieval results for a managed pipeline and a query
@@ -1126,6 +1136,8 @@ class PipelinesClient:
  - query: str. The query to retrieve against.

  - class_name: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud import FilterCondition, MetadataFilters, RetrievalMode
  from llama_cloud.client import LlamaCloud
@@ -1177,7 +1189,7 @@ class PipelinesClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/retrieve"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder(_request),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1738,6 +1750,7 @@ class AsyncPipelinesClient:
  pipeline_name: typing.Optional[str] = None,
  pipeline_type: typing.Optional[PipelineType] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.List[Pipeline]:
  """
  Search for pipelines by various parameters.
@@ -1752,6 +1765,8 @@ class AsyncPipelinesClient:
  - pipeline_type: typing.Optional[PipelineType].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud import PipelineType
  from llama_cloud.client import AsyncLlamaCloud
@@ -1775,7 +1790,7 @@ class AsyncPipelinesClient:
  "organization_id": organization_id,
  }
  ),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1804,13 +1820,15 @@ class AsyncPipelinesClient:
  - organization_id: typing.Optional[str].

  - request: PipelineCreate.
+
+ - project_id: typing.Optional[str].
  """
  _response = await self._client_wrapper.httpx_client.request(
  "POST",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/pipelines"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder(request),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1840,13 +1859,15 @@ class AsyncPipelinesClient:
  - organization_id: typing.Optional[str].

  - request: PipelineCreate.
+
+ - project_id: typing.Optional[str].
  """
  _response = await self._client_wrapper.httpx_client.request(
  "PUT",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/pipelines"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder(request),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -2759,6 +2780,7 @@ class AsyncPipelinesClient:
  retrieve_page_figure_nodes: typing.Optional[bool] = OMIT,
  query: str,
  class_name: typing.Optional[str] = OMIT,
+ project_id: typing.Optional[str] = None,
  ) -> RetrieveResults:
  """
  Get retrieval results for a managed pipeline and a query
@@ -2799,6 +2821,8 @@ class AsyncPipelinesClient:
  - query: str. The query to retrieve against.

  - class_name: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud import FilterCondition, MetadataFilters, RetrievalMode
  from llama_cloud.client import AsyncLlamaCloud
@@ -2850,7 +2874,7 @@ class AsyncPipelinesClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/retrieve"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder(_request),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
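The remaining hunks are from llama_cloud/resources/projects/client.py. The one-line get_project, update_existing_project, delete_project, and get_current_project signatures are reflowed across multiple lines, the project-usage method gains an optional project_id, and every request now includes the Project-Id header. A hypothetical call, assuming the resource is exposed as client.projects:

from llama_cloud.client import LlamaCloud

client = LlamaCloud(token="YOUR_API_KEY")  # assumed constructor
project = client.projects.get_current_project(
    project_id="proj_...",  # forwarded as a query parameter and as the Project-Id header
)
print(project)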
@@ -144,7 +144,13 @@ class ProjectsClient:
  raise ApiError(status_code=_response.status_code, body=_response.text)
  raise ApiError(status_code=_response.status_code, body=_response_json)

- def get_project(self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None) -> Project:
+ def get_project(
+ self,
+ project_id: typing.Optional[str],
+ *,
+ organization_id: typing.Optional[str] = None,
+ ) -> Project:
  """
  Get a project by ID.

@@ -164,7 +172,7 @@ class ProjectsClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
  params=remove_none_from_dict({"organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -178,7 +186,12 @@ class ProjectsClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  def update_existing_project(
- self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None, name: str
+ self,
+ project_id: typing.Optional[str],
+ *,
+ organization_id: typing.Optional[str] = None,
+ name: str,
  ) -> Project:
  """
  Update an existing project.
@@ -189,6 +202,8 @@ class ProjectsClient:
  - organization_id: typing.Optional[str].

  - name: str.
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -204,7 +219,7 @@ class ProjectsClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
  params=remove_none_from_dict({"organization_id": organization_id}),
  json=jsonable_encoder({"name": name}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -217,7 +232,13 @@ class ProjectsClient:
  raise ApiError(status_code=_response.status_code, body=_response.text)
  raise ApiError(status_code=_response.status_code, body=_response_json)

- def delete_project(self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None) -> None:
+ def delete_project(
+ self,
+ project_id: typing.Optional[str],
+ *,
+ organization_id: typing.Optional[str] = None,
+ ) -> None:
  """
  Delete a project by ID.

@@ -237,7 +260,7 @@ class ProjectsClient:
  "DELETE",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
  params=remove_none_from_dict({"organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -251,7 +274,11 @@ class ProjectsClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  def get_current_project(
- self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
  ) -> Project:
  """
  Get the current project.
@@ -272,7 +301,7 @@ class ProjectsClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/projects/current"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -291,6 +320,7 @@ class ProjectsClient:
  *,
  get_current_invoice_total: typing.Optional[bool] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> UsageAndPlan:
  """
  Get usage for a project
@@ -301,6 +331,8 @@ class ProjectsClient:
  - get_current_invoice_total: typing.Optional[bool].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -315,7 +347,7 @@ class ProjectsClient:
  params=remove_none_from_dict(
  {"get_current_invoice_total": get_current_invoice_total, "organization_id": organization_id}
  ),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -448,7 +480,11 @@ class AsyncProjectsClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def get_project(
- self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
+ self,
+ project_id: typing.Optional[str],
+ *,
+ organization_id: typing.Optional[str] = None,
  ) -> Project:
  """
  Get a project by ID.
@@ -469,7 +507,7 @@ class AsyncProjectsClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
  params=remove_none_from_dict({"organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -483,7 +521,12 @@ class AsyncProjectsClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def update_existing_project(
- self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None, name: str
+ self,
+ project_id: typing.Optional[str],
+ *,
+ organization_id: typing.Optional[str] = None,
+ name: str,
  ) -> Project:
  """
  Update an existing project.
@@ -494,6 +537,8 @@ class AsyncProjectsClient:
  - organization_id: typing.Optional[str].

  - name: str.
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -509,7 +554,7 @@ class AsyncProjectsClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
  params=remove_none_from_dict({"organization_id": organization_id}),
  json=jsonable_encoder({"name": name}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -523,7 +568,11 @@ class AsyncProjectsClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def delete_project(
- self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
+ self,
+ project_id: typing.Optional[str],
+ *,
+ organization_id: typing.Optional[str] = None,
  ) -> None:
  """
  Delete a project by ID.
@@ -544,7 +595,7 @@ class AsyncProjectsClient:
  "DELETE",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
  params=remove_none_from_dict({"organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -558,7 +609,11 @@ class AsyncProjectsClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def get_current_project(
- self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
  ) -> Project:
  """
  Get the current project.
@@ -579,7 +636,7 @@ class AsyncProjectsClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/projects/current"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -598,6 +655,7 @@ class AsyncProjectsClient:
  *,
  get_current_invoice_total: typing.Optional[bool] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> UsageAndPlan:
  """
  Get usage for a project
@@ -608,6 +666,8 @@ class AsyncProjectsClient:
  - get_current_invoice_total: typing.Optional[bool].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -622,7 +682,7 @@ class AsyncProjectsClient:
  params=remove_none_from_dict(
  {"get_current_invoice_total": get_current_invoice_total, "organization_id": organization_id}
  ),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
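The async client mirrors these changes one for one. A hypothetical async call (the constructor arguments and the client.projects attribute are assumptions, not shown in this diff):

import asyncio

from llama_cloud.client import AsyncLlamaCloud

async def main() -> None:
    client = AsyncLlamaCloud(token="YOUR_API_KEY")  # assumed constructor
    # The Project-Id header is now sent alongside the path parameter.
    project = await client.projects.get_project("proj_...")
    print(project)

asyncio.run(main())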