llama-cloud 0.1.37__py3-none-any.whl → 0.1.38__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of llama-cloud might be problematic; see the registry's advisory page for more details.

Files changed (37)
  1. llama_cloud/__init__.py +8 -2
  2. llama_cloud/client.py +3 -0
  3. llama_cloud/resources/__init__.py +2 -0
  4. llama_cloud/resources/alpha/__init__.py +2 -0
  5. llama_cloud/resources/alpha/client.py +118 -0
  6. llama_cloud/resources/beta/client.py +126 -30
  7. llama_cloud/resources/chat_apps/client.py +32 -8
  8. llama_cloud/resources/classifier/client.py +139 -11
  9. llama_cloud/resources/data_sinks/client.py +32 -8
  10. llama_cloud/resources/data_sources/client.py +32 -8
  11. llama_cloud/resources/data_sources/types/data_source_update_component.py +2 -0
  12. llama_cloud/resources/embedding_model_configs/client.py +48 -12
  13. llama_cloud/resources/files/client.py +176 -42
  14. llama_cloud/resources/jobs/client.py +12 -6
  15. llama_cloud/resources/llama_extract/client.py +138 -32
  16. llama_cloud/resources/organizations/client.py +18 -4
  17. llama_cloud/resources/parsing/client.py +16 -4
  18. llama_cloud/resources/pipelines/client.py +32 -8
  19. llama_cloud/resources/projects/client.py +78 -18
  20. llama_cloud/resources/reports/client.py +126 -30
  21. llama_cloud/resources/retrievers/client.py +48 -12
  22. llama_cloud/types/__init__.py +6 -2
  23. llama_cloud/types/agent_deployment_summary.py +1 -0
  24. llama_cloud/types/classify_job.py +2 -0
  25. llama_cloud/types/cloud_jira_data_source_v_2.py +52 -0
  26. llama_cloud/types/cloud_jira_data_source_v_2_api_version.py +21 -0
  27. llama_cloud/types/configurable_data_source_names.py +4 -0
  28. llama_cloud/types/data_source_component.py +2 -0
  29. llama_cloud/types/data_source_create_component.py +2 -0
  30. llama_cloud/types/data_source_reader_version_metadata_reader_version.py +9 -1
  31. llama_cloud/types/{classify_job_with_status.py → paginated_response_classify_job.py} +5 -18
  32. llama_cloud/types/pipeline_data_source_component.py +2 -0
  33. llama_cloud/types/usage_response_active_alerts_item.py +4 -0
  34. {llama_cloud-0.1.37.dist-info → llama_cloud-0.1.38.dist-info}/METADATA +2 -3
  35. {llama_cloud-0.1.37.dist-info → llama_cloud-0.1.38.dist-info}/RECORD +37 -33
  36. {llama_cloud-0.1.37.dist-info → llama_cloud-0.1.38.dist-info}/WHEEL +1 -1
  37. {llama_cloud-0.1.37.dist-info → llama_cloud-0.1.38.dist-info}/LICENSE +0 -0
@@ -49,6 +49,7 @@ class BetaClient:
49
49
  offset: typing.Optional[int] = None,
50
50
  project_id: typing.Optional[str] = None,
51
51
  organization_id: typing.Optional[str] = None,
52
+ project_id: typing.Optional[str] = None,
52
53
  ) -> BatchPaginatedList:
53
54
  """
54
55
  Parameters:
@@ -59,6 +60,8 @@ class BetaClient:
59
60
  - project_id: typing.Optional[str].
60
61
 
61
62
  - organization_id: typing.Optional[str].
63
+
64
+ - project_id: typing.Optional[str].
62
65
  ---
63
66
  from llama_cloud.client import LlamaCloud
64
67
 
@@ -73,7 +76,7 @@ class BetaClient:
73
76
  params=remove_none_from_dict(
74
77
  {"limit": limit, "offset": offset, "project_id": project_id, "organization_id": organization_id}
75
78
  ),
76
- headers=self._client_wrapper.get_headers(),
79
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
77
80
  timeout=60,
78
81
  )
79
82
  if 200 <= _response.status_code < 300:
@@ -100,6 +103,7 @@ class BetaClient:
100
103
  batch_create_project_id: str,
101
104
  external_id: str,
102
105
  completion_window: typing.Optional[int] = OMIT,
106
+ project_id: typing.Optional[str] = None,
103
107
  ) -> Batch:
104
108
  """
105
109
  Parameters:
@@ -124,6 +128,8 @@ class BetaClient:
124
128
  - external_id: str. A developer-provided ID for the batch. This ID will be returned in the response.
125
129
 
126
130
  - completion_window: typing.Optional[int]. The time frame within which the batch should be processed. Currently only 24h is supported.
131
+
132
+ - project_id: typing.Optional[str].
127
133
  ---
128
134
  from llama_cloud import (
129
135
  FailPageMode,
@@ -169,7 +175,7 @@ class BetaClient:
169
175
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/batches"),
170
176
  params=remove_none_from_dict({"organization_id": organization_id, "project_id": project_id}),
171
177
  json=jsonable_encoder(_request),
172
- headers=self._client_wrapper.get_headers(),
178
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
173
179
  timeout=60,
174
180
  )
175
181
  if 200 <= _response.status_code < 300:
@@ -216,7 +222,12 @@ class BetaClient:
216
222
  raise ApiError(status_code=_response.status_code, body=_response_json)
217
223
 
218
224
  def get_agent_data(
219
- self, item_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
225
+ self,
226
+ item_id: str,
227
+ *,
228
+ project_id: typing.Optional[str] = None,
229
+ organization_id: typing.Optional[str] = None,
230
+ project_id: typing.Optional[str] = None,
220
231
  ) -> AgentData:
221
232
  """
222
233
  Get agent data by ID.
@@ -227,6 +238,8 @@ class BetaClient:
227
238
  - project_id: typing.Optional[str].
228
239
 
229
240
  - organization_id: typing.Optional[str].
241
+
242
+ - project_id: typing.Optional[str].
230
243
  ---
231
244
  from llama_cloud.client import LlamaCloud
232
245
 
@@ -241,7 +254,7 @@ class BetaClient:
241
254
  "GET",
242
255
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
243
256
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
244
- headers=self._client_wrapper.get_headers(),
257
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
245
258
  timeout=60,
246
259
  )
247
260
  if 200 <= _response.status_code < 300:
@@ -261,6 +274,7 @@ class BetaClient:
261
274
  project_id: typing.Optional[str] = None,
262
275
  organization_id: typing.Optional[str] = None,
263
276
  data: typing.Dict[str, typing.Any],
277
+ project_id: typing.Optional[str] = None,
264
278
  ) -> AgentData:
265
279
  """
266
280
  Update agent data by ID (overwrites).
@@ -273,6 +287,8 @@ class BetaClient:
273
287
  - organization_id: typing.Optional[str].
274
288
 
275
289
  - data: typing.Dict[str, typing.Any].
290
+
291
+ - project_id: typing.Optional[str].
276
292
  ---
277
293
  from llama_cloud.client import LlamaCloud
278
294
 
@@ -289,7 +305,7 @@ class BetaClient:
289
305
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
290
306
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
291
307
  json=jsonable_encoder({"data": data}),
292
- headers=self._client_wrapper.get_headers(),
308
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
293
309
  timeout=60,
294
310
  )
295
311
  if 200 <= _response.status_code < 300:
@@ -303,7 +319,12 @@ class BetaClient:
303
319
  raise ApiError(status_code=_response.status_code, body=_response_json)
304
320
 
305
321
  def delete_agent_data(
306
- self, item_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
322
+ self,
323
+ item_id: str,
324
+ *,
325
+ project_id: typing.Optional[str] = None,
326
+ organization_id: typing.Optional[str] = None,
327
+ project_id: typing.Optional[str] = None,
307
328
  ) -> typing.Dict[str, str]:
308
329
  """
309
330
  Delete agent data by ID.
@@ -314,6 +335,8 @@ class BetaClient:
314
335
  - project_id: typing.Optional[str].
315
336
 
316
337
  - organization_id: typing.Optional[str].
338
+
339
+ - project_id: typing.Optional[str].
317
340
  ---
318
341
  from llama_cloud.client import LlamaCloud
319
342
 
@@ -328,7 +351,7 @@ class BetaClient:
328
351
  "DELETE",
329
352
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
330
353
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
331
- headers=self._client_wrapper.get_headers(),
354
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
332
355
  timeout=60,
333
356
  )
334
357
  if 200 <= _response.status_code < 300:
@@ -349,6 +372,7 @@ class BetaClient:
349
372
  agent_slug: str,
350
373
  collection: typing.Optional[str] = OMIT,
351
374
  data: typing.Dict[str, typing.Any],
375
+ project_id: typing.Optional[str] = None,
352
376
  ) -> AgentData:
353
377
  """
354
378
  Create new agent data.
@@ -363,6 +387,8 @@ class BetaClient:
363
387
  - collection: typing.Optional[str].
364
388
 
365
389
  - data: typing.Dict[str, typing.Any].
390
+
391
+ - project_id: typing.Optional[str].
366
392
  ---
367
393
  from llama_cloud.client import LlamaCloud
368
394
 
@@ -382,7 +408,7 @@ class BetaClient:
382
408
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data"),
383
409
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
384
410
  json=jsonable_encoder(_request),
385
- headers=self._client_wrapper.get_headers(),
411
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
386
412
  timeout=60,
387
413
  )
388
414
  if 200 <= _response.status_code < 300:
@@ -408,6 +434,7 @@ class BetaClient:
408
434
  collection: typing.Optional[str] = OMIT,
409
435
  include_total: typing.Optional[bool] = OMIT,
410
436
  offset: typing.Optional[int] = OMIT,
437
+ project_id: typing.Optional[str] = None,
411
438
  ) -> PaginatedResponseAgentData:
412
439
  """
413
440
  Search agent data with filtering, sorting, and pagination.
@@ -432,6 +459,8 @@ class BetaClient:
432
459
  - include_total: typing.Optional[bool]. Whether to include the total number of items in the response
433
460
 
434
461
  - offset: typing.Optional[int].
462
+
463
+ - project_id: typing.Optional[str].
435
464
  ---
436
465
  from llama_cloud.client import LlamaCloud
437
466
 
@@ -462,7 +491,7 @@ class BetaClient:
462
491
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:search"),
463
492
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
464
493
  json=jsonable_encoder(_request),
465
- headers=self._client_wrapper.get_headers(),
494
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
466
495
  timeout=60,
467
496
  )
468
497
  if 200 <= _response.status_code < 300:
@@ -490,6 +519,7 @@ class BetaClient:
490
519
  count: typing.Optional[bool] = OMIT,
491
520
  first: typing.Optional[bool] = OMIT,
492
521
  offset: typing.Optional[int] = OMIT,
522
+ project_id: typing.Optional[str] = None,
493
523
  ) -> PaginatedResponseAggregateGroup:
494
524
  """
495
525
  Aggregate agent data with grouping and optional counting/first item retrieval.
@@ -518,6 +548,8 @@ class BetaClient:
518
548
  - first: typing.Optional[bool].
519
549
 
520
550
  - offset: typing.Optional[int].
551
+
552
+ - project_id: typing.Optional[str].
521
553
  ---
522
554
  from llama_cloud.client import LlamaCloud
523
555
 
@@ -552,7 +584,7 @@ class BetaClient:
552
584
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:aggregate"),
553
585
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
554
586
  json=jsonable_encoder(_request),
555
- headers=self._client_wrapper.get_headers(),
587
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
556
588
  timeout=60,
557
589
  )
558
590
  if 200 <= _response.status_code < 300:
@@ -620,6 +652,7 @@ class BetaClient:
620
652
  project_id: typing.Optional[str] = None,
621
653
  organization_id: typing.Optional[str] = None,
622
654
  request: FileCreate,
655
+ project_id: typing.Optional[str] = None,
623
656
  ) -> File:
624
657
  """
625
658
  Create a new file in the project.
@@ -638,6 +671,8 @@ class BetaClient:
638
671
  - organization_id: typing.Optional[str].
639
672
 
640
673
  - request: FileCreate.
674
+
675
+ - project_id: typing.Optional[str].
641
676
  ---
642
677
  from llama_cloud import FileCreate
643
678
  from llama_cloud.client import LlamaCloud
@@ -656,7 +691,7 @@ class BetaClient:
656
691
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
657
692
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
658
693
  json=jsonable_encoder(request),
659
- headers=self._client_wrapper.get_headers(),
694
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
660
695
  timeout=60,
661
696
  )
662
697
  if 200 <= _response.status_code < 300:
@@ -675,6 +710,7 @@ class BetaClient:
675
710
  project_id: typing.Optional[str] = None,
676
711
  organization_id: typing.Optional[str] = None,
677
712
  request: FileCreate,
713
+ project_id: typing.Optional[str] = None,
678
714
  ) -> File:
679
715
  """
680
716
  Upsert a file (create or update if exists) in the project.
@@ -693,6 +729,8 @@ class BetaClient:
693
729
  - organization_id: typing.Optional[str].
694
730
 
695
731
  - request: FileCreate.
732
+
733
+ - project_id: typing.Optional[str].
696
734
  ---
697
735
  from llama_cloud import FileCreate
698
736
  from llama_cloud.client import LlamaCloud
@@ -711,7 +749,7 @@ class BetaClient:
711
749
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
712
750
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
713
751
  json=jsonable_encoder(request),
714
- headers=self._client_wrapper.get_headers(),
752
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
715
753
  timeout=60,
716
754
  )
717
755
  if 200 <= _response.status_code < 300:
@@ -733,6 +771,7 @@ class BetaClient:
733
771
  page_token: typing.Optional[str] = OMIT,
734
772
  filter: typing.Optional[FileFilter] = OMIT,
735
773
  order_by: typing.Optional[str] = OMIT,
774
+ project_id: typing.Optional[str] = None,
736
775
  ) -> FileQueryResponse:
737
776
  """
738
777
  Query files with flexible filtering and pagination.
@@ -757,6 +796,8 @@ class BetaClient:
757
796
  - filter: typing.Optional[FileFilter].
758
797
 
759
798
  - order_by: typing.Optional[str].
799
+
800
+ - project_id: typing.Optional[str].
760
801
  ---
761
802
  from llama_cloud import FileFilter
762
803
  from llama_cloud.client import LlamaCloud
@@ -782,7 +823,7 @@ class BetaClient:
782
823
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files/query"),
783
824
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
784
825
  json=jsonable_encoder(_request),
785
- headers=self._client_wrapper.get_headers(),
826
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
786
827
  timeout=60,
787
828
  )
788
829
  if 200 <= _response.status_code < 300:
@@ -796,7 +837,12 @@ class BetaClient:
796
837
  raise ApiError(status_code=_response.status_code, body=_response_json)
797
838
 
798
839
  def delete_file(
799
- self, file_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
840
+ self,
841
+ file_id: str,
842
+ *,
843
+ project_id: typing.Optional[str] = None,
844
+ organization_id: typing.Optional[str] = None,
845
+ project_id: typing.Optional[str] = None,
800
846
  ) -> None:
801
847
  """
802
848
  Delete a single file from the project.
@@ -815,6 +861,8 @@ class BetaClient:
815
861
  - project_id: typing.Optional[str].
816
862
 
817
863
  - organization_id: typing.Optional[str].
864
+
865
+ - project_id: typing.Optional[str].
818
866
  ---
819
867
  from llama_cloud.client import LlamaCloud
820
868
 
@@ -829,7 +877,7 @@ class BetaClient:
829
877
  "DELETE",
830
878
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/files/{file_id}"),
831
879
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
832
- headers=self._client_wrapper.get_headers(),
880
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
833
881
  timeout=60,
834
882
  )
835
883
  if 200 <= _response.status_code < 300:
@@ -854,6 +902,7 @@ class AsyncBetaClient:
854
902
  offset: typing.Optional[int] = None,
855
903
  project_id: typing.Optional[str] = None,
856
904
  organization_id: typing.Optional[str] = None,
905
+ project_id: typing.Optional[str] = None,
857
906
  ) -> BatchPaginatedList:
858
907
  """
859
908
  Parameters:
@@ -864,6 +913,8 @@ class AsyncBetaClient:
864
913
  - project_id: typing.Optional[str].
865
914
 
866
915
  - organization_id: typing.Optional[str].
916
+
917
+ - project_id: typing.Optional[str].
867
918
  ---
868
919
  from llama_cloud.client import AsyncLlamaCloud
869
920
 
@@ -878,7 +929,7 @@ class AsyncBetaClient:
878
929
  params=remove_none_from_dict(
879
930
  {"limit": limit, "offset": offset, "project_id": project_id, "organization_id": organization_id}
880
931
  ),
881
- headers=self._client_wrapper.get_headers(),
932
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
882
933
  timeout=60,
883
934
  )
884
935
  if 200 <= _response.status_code < 300:
@@ -905,6 +956,7 @@ class AsyncBetaClient:
905
956
  batch_create_project_id: str,
906
957
  external_id: str,
907
958
  completion_window: typing.Optional[int] = OMIT,
959
+ project_id: typing.Optional[str] = None,
908
960
  ) -> Batch:
909
961
  """
910
962
  Parameters:
@@ -929,6 +981,8 @@ class AsyncBetaClient:
929
981
  - external_id: str. A developer-provided ID for the batch. This ID will be returned in the response.
930
982
 
931
983
  - completion_window: typing.Optional[int]. The time frame within which the batch should be processed. Currently only 24h is supported.
984
+
985
+ - project_id: typing.Optional[str].
932
986
  ---
933
987
  from llama_cloud import (
934
988
  FailPageMode,
@@ -974,7 +1028,7 @@ class AsyncBetaClient:
974
1028
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/batches"),
975
1029
  params=remove_none_from_dict({"organization_id": organization_id, "project_id": project_id}),
976
1030
  json=jsonable_encoder(_request),
977
- headers=self._client_wrapper.get_headers(),
1031
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
978
1032
  timeout=60,
979
1033
  )
980
1034
  if 200 <= _response.status_code < 300:
@@ -1021,7 +1075,12 @@ class AsyncBetaClient:
1021
1075
  raise ApiError(status_code=_response.status_code, body=_response_json)
1022
1076
 
1023
1077
  async def get_agent_data(
1024
- self, item_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1078
+ self,
1079
+ item_id: str,
1080
+ *,
1081
+ project_id: typing.Optional[str] = None,
1082
+ organization_id: typing.Optional[str] = None,
1083
+ project_id: typing.Optional[str] = None,
1025
1084
  ) -> AgentData:
1026
1085
  """
1027
1086
  Get agent data by ID.
@@ -1032,6 +1091,8 @@ class AsyncBetaClient:
1032
1091
  - project_id: typing.Optional[str].
1033
1092
 
1034
1093
  - organization_id: typing.Optional[str].
1094
+
1095
+ - project_id: typing.Optional[str].
1035
1096
  ---
1036
1097
  from llama_cloud.client import AsyncLlamaCloud
1037
1098
 
@@ -1046,7 +1107,7 @@ class AsyncBetaClient:
1046
1107
  "GET",
1047
1108
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
1048
1109
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1049
- headers=self._client_wrapper.get_headers(),
1110
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1050
1111
  timeout=60,
1051
1112
  )
1052
1113
  if 200 <= _response.status_code < 300:
@@ -1066,6 +1127,7 @@ class AsyncBetaClient:
1066
1127
  project_id: typing.Optional[str] = None,
1067
1128
  organization_id: typing.Optional[str] = None,
1068
1129
  data: typing.Dict[str, typing.Any],
1130
+ project_id: typing.Optional[str] = None,
1069
1131
  ) -> AgentData:
1070
1132
  """
1071
1133
  Update agent data by ID (overwrites).
@@ -1078,6 +1140,8 @@ class AsyncBetaClient:
1078
1140
  - organization_id: typing.Optional[str].
1079
1141
 
1080
1142
  - data: typing.Dict[str, typing.Any].
1143
+
1144
+ - project_id: typing.Optional[str].
1081
1145
  ---
1082
1146
  from llama_cloud.client import AsyncLlamaCloud
1083
1147
 
@@ -1094,7 +1158,7 @@ class AsyncBetaClient:
1094
1158
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
1095
1159
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1096
1160
  json=jsonable_encoder({"data": data}),
1097
- headers=self._client_wrapper.get_headers(),
1161
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1098
1162
  timeout=60,
1099
1163
  )
1100
1164
  if 200 <= _response.status_code < 300:
@@ -1108,7 +1172,12 @@ class AsyncBetaClient:
1108
1172
  raise ApiError(status_code=_response.status_code, body=_response_json)
1109
1173
 
1110
1174
  async def delete_agent_data(
1111
- self, item_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1175
+ self,
1176
+ item_id: str,
1177
+ *,
1178
+ project_id: typing.Optional[str] = None,
1179
+ organization_id: typing.Optional[str] = None,
1180
+ project_id: typing.Optional[str] = None,
1112
1181
  ) -> typing.Dict[str, str]:
1113
1182
  """
1114
1183
  Delete agent data by ID.
@@ -1119,6 +1188,8 @@ class AsyncBetaClient:
1119
1188
  - project_id: typing.Optional[str].
1120
1189
 
1121
1190
  - organization_id: typing.Optional[str].
1191
+
1192
+ - project_id: typing.Optional[str].
1122
1193
  ---
1123
1194
  from llama_cloud.client import AsyncLlamaCloud
1124
1195
 
@@ -1133,7 +1204,7 @@ class AsyncBetaClient:
1133
1204
  "DELETE",
1134
1205
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
1135
1206
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1136
- headers=self._client_wrapper.get_headers(),
1207
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1137
1208
  timeout=60,
1138
1209
  )
1139
1210
  if 200 <= _response.status_code < 300:
@@ -1154,6 +1225,7 @@ class AsyncBetaClient:
1154
1225
  agent_slug: str,
1155
1226
  collection: typing.Optional[str] = OMIT,
1156
1227
  data: typing.Dict[str, typing.Any],
1228
+ project_id: typing.Optional[str] = None,
1157
1229
  ) -> AgentData:
1158
1230
  """
1159
1231
  Create new agent data.
@@ -1168,6 +1240,8 @@ class AsyncBetaClient:
1168
1240
  - collection: typing.Optional[str].
1169
1241
 
1170
1242
  - data: typing.Dict[str, typing.Any].
1243
+
1244
+ - project_id: typing.Optional[str].
1171
1245
  ---
1172
1246
  from llama_cloud.client import AsyncLlamaCloud
1173
1247
 
@@ -1187,7 +1261,7 @@ class AsyncBetaClient:
1187
1261
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data"),
1188
1262
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1189
1263
  json=jsonable_encoder(_request),
1190
- headers=self._client_wrapper.get_headers(),
1264
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1191
1265
  timeout=60,
1192
1266
  )
1193
1267
  if 200 <= _response.status_code < 300:
@@ -1213,6 +1287,7 @@ class AsyncBetaClient:
1213
1287
  collection: typing.Optional[str] = OMIT,
1214
1288
  include_total: typing.Optional[bool] = OMIT,
1215
1289
  offset: typing.Optional[int] = OMIT,
1290
+ project_id: typing.Optional[str] = None,
1216
1291
  ) -> PaginatedResponseAgentData:
1217
1292
  """
1218
1293
  Search agent data with filtering, sorting, and pagination.
@@ -1237,6 +1312,8 @@ class AsyncBetaClient:
1237
1312
  - include_total: typing.Optional[bool]. Whether to include the total number of items in the response
1238
1313
 
1239
1314
  - offset: typing.Optional[int].
1315
+
1316
+ - project_id: typing.Optional[str].
1240
1317
  ---
1241
1318
  from llama_cloud.client import AsyncLlamaCloud
1242
1319
 
@@ -1267,7 +1344,7 @@ class AsyncBetaClient:
1267
1344
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:search"),
1268
1345
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1269
1346
  json=jsonable_encoder(_request),
1270
- headers=self._client_wrapper.get_headers(),
1347
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1271
1348
  timeout=60,
1272
1349
  )
1273
1350
  if 200 <= _response.status_code < 300:
@@ -1295,6 +1372,7 @@ class AsyncBetaClient:
1295
1372
  count: typing.Optional[bool] = OMIT,
1296
1373
  first: typing.Optional[bool] = OMIT,
1297
1374
  offset: typing.Optional[int] = OMIT,
1375
+ project_id: typing.Optional[str] = None,
1298
1376
  ) -> PaginatedResponseAggregateGroup:
1299
1377
  """
1300
1378
  Aggregate agent data with grouping and optional counting/first item retrieval.
@@ -1323,6 +1401,8 @@ class AsyncBetaClient:
1323
1401
  - first: typing.Optional[bool].
1324
1402
 
1325
1403
  - offset: typing.Optional[int].
1404
+
1405
+ - project_id: typing.Optional[str].
1326
1406
  ---
1327
1407
  from llama_cloud.client import AsyncLlamaCloud
1328
1408
 
@@ -1357,7 +1437,7 @@ class AsyncBetaClient:
1357
1437
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:aggregate"),
1358
1438
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1359
1439
  json=jsonable_encoder(_request),
1360
- headers=self._client_wrapper.get_headers(),
1440
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1361
1441
  timeout=60,
1362
1442
  )
1363
1443
  if 200 <= _response.status_code < 300:
@@ -1425,6 +1505,7 @@ class AsyncBetaClient:
1425
1505
  project_id: typing.Optional[str] = None,
1426
1506
  organization_id: typing.Optional[str] = None,
1427
1507
  request: FileCreate,
1508
+ project_id: typing.Optional[str] = None,
1428
1509
  ) -> File:
1429
1510
  """
1430
1511
  Create a new file in the project.
@@ -1443,6 +1524,8 @@ class AsyncBetaClient:
1443
1524
  - organization_id: typing.Optional[str].
1444
1525
 
1445
1526
  - request: FileCreate.
1527
+
1528
+ - project_id: typing.Optional[str].
1446
1529
  ---
1447
1530
  from llama_cloud import FileCreate
1448
1531
  from llama_cloud.client import AsyncLlamaCloud
@@ -1461,7 +1544,7 @@ class AsyncBetaClient:
1461
1544
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
1462
1545
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1463
1546
  json=jsonable_encoder(request),
1464
- headers=self._client_wrapper.get_headers(),
1547
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1465
1548
  timeout=60,
1466
1549
  )
1467
1550
  if 200 <= _response.status_code < 300:
@@ -1480,6 +1563,7 @@ class AsyncBetaClient:
1480
1563
  project_id: typing.Optional[str] = None,
1481
1564
  organization_id: typing.Optional[str] = None,
1482
1565
  request: FileCreate,
1566
+ project_id: typing.Optional[str] = None,
1483
1567
  ) -> File:
1484
1568
  """
1485
1569
  Upsert a file (create or update if exists) in the project.
@@ -1498,6 +1582,8 @@ class AsyncBetaClient:
1498
1582
  - organization_id: typing.Optional[str].
1499
1583
 
1500
1584
  - request: FileCreate.
1585
+
1586
+ - project_id: typing.Optional[str].
1501
1587
  ---
1502
1588
  from llama_cloud import FileCreate
1503
1589
  from llama_cloud.client import AsyncLlamaCloud
@@ -1516,7 +1602,7 @@ class AsyncBetaClient:
1516
1602
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
1517
1603
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1518
1604
  json=jsonable_encoder(request),
1519
- headers=self._client_wrapper.get_headers(),
1605
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1520
1606
  timeout=60,
1521
1607
  )
1522
1608
  if 200 <= _response.status_code < 300:
@@ -1538,6 +1624,7 @@ class AsyncBetaClient:
1538
1624
  page_token: typing.Optional[str] = OMIT,
1539
1625
  filter: typing.Optional[FileFilter] = OMIT,
1540
1626
  order_by: typing.Optional[str] = OMIT,
1627
+ project_id: typing.Optional[str] = None,
1541
1628
  ) -> FileQueryResponse:
1542
1629
  """
1543
1630
  Query files with flexible filtering and pagination.
@@ -1562,6 +1649,8 @@ class AsyncBetaClient:
1562
1649
  - filter: typing.Optional[FileFilter].
1563
1650
 
1564
1651
  - order_by: typing.Optional[str].
1652
+
1653
+ - project_id: typing.Optional[str].
1565
1654
  ---
1566
1655
  from llama_cloud import FileFilter
1567
1656
  from llama_cloud.client import AsyncLlamaCloud
@@ -1587,7 +1676,7 @@ class AsyncBetaClient:
1587
1676
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files/query"),
1588
1677
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1589
1678
  json=jsonable_encoder(_request),
1590
- headers=self._client_wrapper.get_headers(),
1679
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1591
1680
  timeout=60,
1592
1681
  )
1593
1682
  if 200 <= _response.status_code < 300:
@@ -1601,7 +1690,12 @@ class AsyncBetaClient:
1601
1690
  raise ApiError(status_code=_response.status_code, body=_response_json)
1602
1691
 
1603
1692
  async def delete_file(
1604
- self, file_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1693
+ self,
1694
+ file_id: str,
1695
+ *,
1696
+ project_id: typing.Optional[str] = None,
1697
+ organization_id: typing.Optional[str] = None,
1698
+ project_id: typing.Optional[str] = None,
1605
1699
  ) -> None:
1606
1700
  """
1607
1701
  Delete a single file from the project.
@@ -1620,6 +1714,8 @@ class AsyncBetaClient:
1620
1714
  - project_id: typing.Optional[str].
1621
1715
 
1622
1716
  - organization_id: typing.Optional[str].
1717
+
1718
+ - project_id: typing.Optional[str].
1623
1719
  ---
1624
1720
  from llama_cloud.client import AsyncLlamaCloud
1625
1721
 
@@ -1634,7 +1730,7 @@ class AsyncBetaClient:
1634
1730
  "DELETE",
1635
1731
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/files/{file_id}"),
1636
1732
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1637
- headers=self._client_wrapper.get_headers(),
1733
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1638
1734
  timeout=60,
1639
1735
  )
1640
1736
  if 200 <= _response.status_code < 300: