llama-cloud 0.1.38__py3-none-any.whl → 0.1.39__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release has been flagged as potentially problematic.


This version of llama-cloud might be problematic; consult the package registry's advisory page for more details.

@@ -35,7 +35,6 @@ class AlphaClient:
35
35
  organization_id: typing.Optional[str] = None,
36
36
  configuration: str,
37
37
  file: typing.Optional[str] = OMIT,
38
- project_id: typing.Optional[str] = None,
39
38
  ) -> ParsingJob:
40
39
  """
41
40
  Parameters:
@@ -46,8 +45,6 @@ class AlphaClient:
46
45
  - configuration: str.
47
46
 
48
47
  - file: typing.Optional[str].
49
-
50
- - project_id: typing.Optional[str].
51
48
  """
52
49
  _request: typing.Dict[str, typing.Any] = {"configuration": configuration}
53
50
  if file is not OMIT:
@@ -57,7 +54,7 @@ class AlphaClient:
57
54
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v2alpha1/parse/upload"),
58
55
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
59
56
  json=jsonable_encoder(_request),
60
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
57
+ headers=self._client_wrapper.get_headers(),
61
58
  timeout=60,
62
59
  )
63
60
  if 200 <= _response.status_code < 300:
@@ -82,7 +79,6 @@ class AsyncAlphaClient:
82
79
  organization_id: typing.Optional[str] = None,
83
80
  configuration: str,
84
81
  file: typing.Optional[str] = OMIT,
85
- project_id: typing.Optional[str] = None,
86
82
  ) -> ParsingJob:
87
83
  """
88
84
  Parameters:
@@ -93,8 +89,6 @@ class AsyncAlphaClient:
93
89
  - configuration: str.
94
90
 
95
91
  - file: typing.Optional[str].
96
-
97
- - project_id: typing.Optional[str].
98
92
  """
99
93
  _request: typing.Dict[str, typing.Any] = {"configuration": configuration}
100
94
  if file is not OMIT:
@@ -104,7 +98,7 @@ class AsyncAlphaClient:
104
98
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v2alpha1/parse/upload"),
105
99
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
106
100
  json=jsonable_encoder(_request),
107
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
101
+ headers=self._client_wrapper.get_headers(),
108
102
  timeout=60,
109
103
  )
110
104
  if 200 <= _response.status_code < 300:
@@ -49,7 +49,6 @@ class BetaClient:
49
49
  offset: typing.Optional[int] = None,
50
50
  project_id: typing.Optional[str] = None,
51
51
  organization_id: typing.Optional[str] = None,
52
- project_id: typing.Optional[str] = None,
53
52
  ) -> BatchPaginatedList:
54
53
  """
55
54
  Parameters:
@@ -60,8 +59,6 @@ class BetaClient:
60
59
  - project_id: typing.Optional[str].
61
60
 
62
61
  - organization_id: typing.Optional[str].
63
-
64
- - project_id: typing.Optional[str].
65
62
  ---
66
63
  from llama_cloud.client import LlamaCloud
67
64
 
@@ -76,7 +73,7 @@ class BetaClient:
76
73
  params=remove_none_from_dict(
77
74
  {"limit": limit, "offset": offset, "project_id": project_id, "organization_id": organization_id}
78
75
  ),
79
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
76
+ headers=self._client_wrapper.get_headers(),
80
77
  timeout=60,
81
78
  )
82
79
  if 200 <= _response.status_code < 300:
@@ -103,7 +100,6 @@ class BetaClient:
103
100
  batch_create_project_id: str,
104
101
  external_id: str,
105
102
  completion_window: typing.Optional[int] = OMIT,
106
- project_id: typing.Optional[str] = None,
107
103
  ) -> Batch:
108
104
  """
109
105
  Parameters:
@@ -128,8 +124,6 @@ class BetaClient:
128
124
  - external_id: str. A developer-provided ID for the batch. This ID will be returned in the response.
129
125
 
130
126
  - completion_window: typing.Optional[int]. The time frame within which the batch should be processed. Currently only 24h is supported.
131
-
132
- - project_id: typing.Optional[str].
133
127
  ---
134
128
  from llama_cloud import (
135
129
  FailPageMode,
@@ -175,7 +169,7 @@ class BetaClient:
175
169
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/batches"),
176
170
  params=remove_none_from_dict({"organization_id": organization_id, "project_id": project_id}),
177
171
  json=jsonable_encoder(_request),
178
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
172
+ headers=self._client_wrapper.get_headers(),
179
173
  timeout=60,
180
174
  )
181
175
  if 200 <= _response.status_code < 300:
@@ -222,12 +216,7 @@ class BetaClient:
222
216
  raise ApiError(status_code=_response.status_code, body=_response_json)
223
217
 
224
218
  def get_agent_data(
225
- self,
226
- item_id: str,
227
- *,
228
- project_id: typing.Optional[str] = None,
229
- organization_id: typing.Optional[str] = None,
230
- project_id: typing.Optional[str] = None,
219
+ self, item_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
231
220
  ) -> AgentData:
232
221
  """
233
222
  Get agent data by ID.
@@ -238,8 +227,6 @@ class BetaClient:
238
227
  - project_id: typing.Optional[str].
239
228
 
240
229
  - organization_id: typing.Optional[str].
241
-
242
- - project_id: typing.Optional[str].
243
230
  ---
244
231
  from llama_cloud.client import LlamaCloud
245
232
 
@@ -254,7 +241,7 @@ class BetaClient:
254
241
  "GET",
255
242
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
256
243
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
257
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
244
+ headers=self._client_wrapper.get_headers(),
258
245
  timeout=60,
259
246
  )
260
247
  if 200 <= _response.status_code < 300:
@@ -274,7 +261,6 @@ class BetaClient:
274
261
  project_id: typing.Optional[str] = None,
275
262
  organization_id: typing.Optional[str] = None,
276
263
  data: typing.Dict[str, typing.Any],
277
- project_id: typing.Optional[str] = None,
278
264
  ) -> AgentData:
279
265
  """
280
266
  Update agent data by ID (overwrites).
@@ -287,8 +273,6 @@ class BetaClient:
287
273
  - organization_id: typing.Optional[str].
288
274
 
289
275
  - data: typing.Dict[str, typing.Any].
290
-
291
- - project_id: typing.Optional[str].
292
276
  ---
293
277
  from llama_cloud.client import LlamaCloud
294
278
 
@@ -305,7 +289,7 @@ class BetaClient:
305
289
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
306
290
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
307
291
  json=jsonable_encoder({"data": data}),
308
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
292
+ headers=self._client_wrapper.get_headers(),
309
293
  timeout=60,
310
294
  )
311
295
  if 200 <= _response.status_code < 300:
@@ -319,12 +303,7 @@ class BetaClient:
319
303
  raise ApiError(status_code=_response.status_code, body=_response_json)
320
304
 
321
305
  def delete_agent_data(
322
- self,
323
- item_id: str,
324
- *,
325
- project_id: typing.Optional[str] = None,
326
- organization_id: typing.Optional[str] = None,
327
- project_id: typing.Optional[str] = None,
306
+ self, item_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
328
307
  ) -> typing.Dict[str, str]:
329
308
  """
330
309
  Delete agent data by ID.
@@ -335,8 +314,6 @@ class BetaClient:
335
314
  - project_id: typing.Optional[str].
336
315
 
337
316
  - organization_id: typing.Optional[str].
338
-
339
- - project_id: typing.Optional[str].
340
317
  ---
341
318
  from llama_cloud.client import LlamaCloud
342
319
 
@@ -351,7 +328,7 @@ class BetaClient:
351
328
  "DELETE",
352
329
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
353
330
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
354
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
331
+ headers=self._client_wrapper.get_headers(),
355
332
  timeout=60,
356
333
  )
357
334
  if 200 <= _response.status_code < 300:
@@ -372,7 +349,6 @@ class BetaClient:
372
349
  agent_slug: str,
373
350
  collection: typing.Optional[str] = OMIT,
374
351
  data: typing.Dict[str, typing.Any],
375
- project_id: typing.Optional[str] = None,
376
352
  ) -> AgentData:
377
353
  """
378
354
  Create new agent data.
@@ -387,8 +363,6 @@ class BetaClient:
387
363
  - collection: typing.Optional[str].
388
364
 
389
365
  - data: typing.Dict[str, typing.Any].
390
-
391
- - project_id: typing.Optional[str].
392
366
  ---
393
367
  from llama_cloud.client import LlamaCloud
394
368
 
@@ -408,7 +382,7 @@ class BetaClient:
408
382
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data"),
409
383
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
410
384
  json=jsonable_encoder(_request),
411
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
385
+ headers=self._client_wrapper.get_headers(),
412
386
  timeout=60,
413
387
  )
414
388
  if 200 <= _response.status_code < 300:
@@ -434,7 +408,6 @@ class BetaClient:
434
408
  collection: typing.Optional[str] = OMIT,
435
409
  include_total: typing.Optional[bool] = OMIT,
436
410
  offset: typing.Optional[int] = OMIT,
437
- project_id: typing.Optional[str] = None,
438
411
  ) -> PaginatedResponseAgentData:
439
412
  """
440
413
  Search agent data with filtering, sorting, and pagination.
@@ -459,8 +432,6 @@ class BetaClient:
459
432
  - include_total: typing.Optional[bool]. Whether to include the total number of items in the response
460
433
 
461
434
  - offset: typing.Optional[int].
462
-
463
- - project_id: typing.Optional[str].
464
435
  ---
465
436
  from llama_cloud.client import LlamaCloud
466
437
 
@@ -491,7 +462,7 @@ class BetaClient:
491
462
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:search"),
492
463
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
493
464
  json=jsonable_encoder(_request),
494
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
465
+ headers=self._client_wrapper.get_headers(),
495
466
  timeout=60,
496
467
  )
497
468
  if 200 <= _response.status_code < 300:
@@ -519,7 +490,6 @@ class BetaClient:
519
490
  count: typing.Optional[bool] = OMIT,
520
491
  first: typing.Optional[bool] = OMIT,
521
492
  offset: typing.Optional[int] = OMIT,
522
- project_id: typing.Optional[str] = None,
523
493
  ) -> PaginatedResponseAggregateGroup:
524
494
  """
525
495
  Aggregate agent data with grouping and optional counting/first item retrieval.
@@ -548,8 +518,6 @@ class BetaClient:
548
518
  - first: typing.Optional[bool].
549
519
 
550
520
  - offset: typing.Optional[int].
551
-
552
- - project_id: typing.Optional[str].
553
521
  ---
554
522
  from llama_cloud.client import LlamaCloud
555
523
 
@@ -584,7 +552,7 @@ class BetaClient:
584
552
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:aggregate"),
585
553
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
586
554
  json=jsonable_encoder(_request),
587
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
555
+ headers=self._client_wrapper.get_headers(),
588
556
  timeout=60,
589
557
  )
590
558
  if 200 <= _response.status_code < 300:
@@ -652,7 +620,6 @@ class BetaClient:
652
620
  project_id: typing.Optional[str] = None,
653
621
  organization_id: typing.Optional[str] = None,
654
622
  request: FileCreate,
655
- project_id: typing.Optional[str] = None,
656
623
  ) -> File:
657
624
  """
658
625
  Create a new file in the project.
@@ -671,8 +638,6 @@ class BetaClient:
671
638
  - organization_id: typing.Optional[str].
672
639
 
673
640
  - request: FileCreate.
674
-
675
- - project_id: typing.Optional[str].
676
641
  ---
677
642
  from llama_cloud import FileCreate
678
643
  from llama_cloud.client import LlamaCloud
@@ -691,7 +656,7 @@ class BetaClient:
691
656
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
692
657
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
693
658
  json=jsonable_encoder(request),
694
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
659
+ headers=self._client_wrapper.get_headers(),
695
660
  timeout=60,
696
661
  )
697
662
  if 200 <= _response.status_code < 300:
@@ -710,7 +675,6 @@ class BetaClient:
710
675
  project_id: typing.Optional[str] = None,
711
676
  organization_id: typing.Optional[str] = None,
712
677
  request: FileCreate,
713
- project_id: typing.Optional[str] = None,
714
678
  ) -> File:
715
679
  """
716
680
  Upsert a file (create or update if exists) in the project.
@@ -729,8 +693,6 @@ class BetaClient:
729
693
  - organization_id: typing.Optional[str].
730
694
 
731
695
  - request: FileCreate.
732
-
733
- - project_id: typing.Optional[str].
734
696
  ---
735
697
  from llama_cloud import FileCreate
736
698
  from llama_cloud.client import LlamaCloud
@@ -749,7 +711,7 @@ class BetaClient:
749
711
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
750
712
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
751
713
  json=jsonable_encoder(request),
752
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
714
+ headers=self._client_wrapper.get_headers(),
753
715
  timeout=60,
754
716
  )
755
717
  if 200 <= _response.status_code < 300:
@@ -771,7 +733,6 @@ class BetaClient:
771
733
  page_token: typing.Optional[str] = OMIT,
772
734
  filter: typing.Optional[FileFilter] = OMIT,
773
735
  order_by: typing.Optional[str] = OMIT,
774
- project_id: typing.Optional[str] = None,
775
736
  ) -> FileQueryResponse:
776
737
  """
777
738
  Query files with flexible filtering and pagination.
@@ -796,8 +757,6 @@ class BetaClient:
796
757
  - filter: typing.Optional[FileFilter].
797
758
 
798
759
  - order_by: typing.Optional[str].
799
-
800
- - project_id: typing.Optional[str].
801
760
  ---
802
761
  from llama_cloud import FileFilter
803
762
  from llama_cloud.client import LlamaCloud
@@ -823,7 +782,7 @@ class BetaClient:
823
782
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files/query"),
824
783
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
825
784
  json=jsonable_encoder(_request),
826
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
785
+ headers=self._client_wrapper.get_headers(),
827
786
  timeout=60,
828
787
  )
829
788
  if 200 <= _response.status_code < 300:
@@ -837,12 +796,7 @@ class BetaClient:
837
796
  raise ApiError(status_code=_response.status_code, body=_response_json)
838
797
 
839
798
  def delete_file(
840
- self,
841
- file_id: str,
842
- *,
843
- project_id: typing.Optional[str] = None,
844
- organization_id: typing.Optional[str] = None,
845
- project_id: typing.Optional[str] = None,
799
+ self, file_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
846
800
  ) -> None:
847
801
  """
848
802
  Delete a single file from the project.
@@ -861,8 +815,6 @@ class BetaClient:
861
815
  - project_id: typing.Optional[str].
862
816
 
863
817
  - organization_id: typing.Optional[str].
864
-
865
- - project_id: typing.Optional[str].
866
818
  ---
867
819
  from llama_cloud.client import LlamaCloud
868
820
 
@@ -877,7 +829,7 @@ class BetaClient:
877
829
  "DELETE",
878
830
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/files/{file_id}"),
879
831
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
880
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
832
+ headers=self._client_wrapper.get_headers(),
881
833
  timeout=60,
882
834
  )
883
835
  if 200 <= _response.status_code < 300:
@@ -902,7 +854,6 @@ class AsyncBetaClient:
902
854
  offset: typing.Optional[int] = None,
903
855
  project_id: typing.Optional[str] = None,
904
856
  organization_id: typing.Optional[str] = None,
905
- project_id: typing.Optional[str] = None,
906
857
  ) -> BatchPaginatedList:
907
858
  """
908
859
  Parameters:
@@ -913,8 +864,6 @@ class AsyncBetaClient:
913
864
  - project_id: typing.Optional[str].
914
865
 
915
866
  - organization_id: typing.Optional[str].
916
-
917
- - project_id: typing.Optional[str].
918
867
  ---
919
868
  from llama_cloud.client import AsyncLlamaCloud
920
869
 
@@ -929,7 +878,7 @@ class AsyncBetaClient:
929
878
  params=remove_none_from_dict(
930
879
  {"limit": limit, "offset": offset, "project_id": project_id, "organization_id": organization_id}
931
880
  ),
932
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
881
+ headers=self._client_wrapper.get_headers(),
933
882
  timeout=60,
934
883
  )
935
884
  if 200 <= _response.status_code < 300:
@@ -956,7 +905,6 @@ class AsyncBetaClient:
956
905
  batch_create_project_id: str,
957
906
  external_id: str,
958
907
  completion_window: typing.Optional[int] = OMIT,
959
- project_id: typing.Optional[str] = None,
960
908
  ) -> Batch:
961
909
  """
962
910
  Parameters:
@@ -981,8 +929,6 @@ class AsyncBetaClient:
981
929
  - external_id: str. A developer-provided ID for the batch. This ID will be returned in the response.
982
930
 
983
931
  - completion_window: typing.Optional[int]. The time frame within which the batch should be processed. Currently only 24h is supported.
984
-
985
- - project_id: typing.Optional[str].
986
932
  ---
987
933
  from llama_cloud import (
988
934
  FailPageMode,
@@ -1028,7 +974,7 @@ class AsyncBetaClient:
1028
974
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/batches"),
1029
975
  params=remove_none_from_dict({"organization_id": organization_id, "project_id": project_id}),
1030
976
  json=jsonable_encoder(_request),
1031
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
977
+ headers=self._client_wrapper.get_headers(),
1032
978
  timeout=60,
1033
979
  )
1034
980
  if 200 <= _response.status_code < 300:
@@ -1075,12 +1021,7 @@ class AsyncBetaClient:
1075
1021
  raise ApiError(status_code=_response.status_code, body=_response_json)
1076
1022
 
1077
1023
  async def get_agent_data(
1078
- self,
1079
- item_id: str,
1080
- *,
1081
- project_id: typing.Optional[str] = None,
1082
- organization_id: typing.Optional[str] = None,
1083
- project_id: typing.Optional[str] = None,
1024
+ self, item_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1084
1025
  ) -> AgentData:
1085
1026
  """
1086
1027
  Get agent data by ID.
@@ -1091,8 +1032,6 @@ class AsyncBetaClient:
1091
1032
  - project_id: typing.Optional[str].
1092
1033
 
1093
1034
  - organization_id: typing.Optional[str].
1094
-
1095
- - project_id: typing.Optional[str].
1096
1035
  ---
1097
1036
  from llama_cloud.client import AsyncLlamaCloud
1098
1037
 
@@ -1107,7 +1046,7 @@ class AsyncBetaClient:
1107
1046
  "GET",
1108
1047
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
1109
1048
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1110
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1049
+ headers=self._client_wrapper.get_headers(),
1111
1050
  timeout=60,
1112
1051
  )
1113
1052
  if 200 <= _response.status_code < 300:
@@ -1127,7 +1066,6 @@ class AsyncBetaClient:
1127
1066
  project_id: typing.Optional[str] = None,
1128
1067
  organization_id: typing.Optional[str] = None,
1129
1068
  data: typing.Dict[str, typing.Any],
1130
- project_id: typing.Optional[str] = None,
1131
1069
  ) -> AgentData:
1132
1070
  """
1133
1071
  Update agent data by ID (overwrites).
@@ -1140,8 +1078,6 @@ class AsyncBetaClient:
1140
1078
  - organization_id: typing.Optional[str].
1141
1079
 
1142
1080
  - data: typing.Dict[str, typing.Any].
1143
-
1144
- - project_id: typing.Optional[str].
1145
1081
  ---
1146
1082
  from llama_cloud.client import AsyncLlamaCloud
1147
1083
 
@@ -1158,7 +1094,7 @@ class AsyncBetaClient:
1158
1094
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
1159
1095
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1160
1096
  json=jsonable_encoder({"data": data}),
1161
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1097
+ headers=self._client_wrapper.get_headers(),
1162
1098
  timeout=60,
1163
1099
  )
1164
1100
  if 200 <= _response.status_code < 300:
@@ -1172,12 +1108,7 @@ class AsyncBetaClient:
1172
1108
  raise ApiError(status_code=_response.status_code, body=_response_json)
1173
1109
 
1174
1110
  async def delete_agent_data(
1175
- self,
1176
- item_id: str,
1177
- *,
1178
- project_id: typing.Optional[str] = None,
1179
- organization_id: typing.Optional[str] = None,
1180
- project_id: typing.Optional[str] = None,
1111
+ self, item_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1181
1112
  ) -> typing.Dict[str, str]:
1182
1113
  """
1183
1114
  Delete agent data by ID.
@@ -1188,8 +1119,6 @@ class AsyncBetaClient:
1188
1119
  - project_id: typing.Optional[str].
1189
1120
 
1190
1121
  - organization_id: typing.Optional[str].
1191
-
1192
- - project_id: typing.Optional[str].
1193
1122
  ---
1194
1123
  from llama_cloud.client import AsyncLlamaCloud
1195
1124
 
@@ -1204,7 +1133,7 @@ class AsyncBetaClient:
1204
1133
  "DELETE",
1205
1134
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
1206
1135
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1207
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1136
+ headers=self._client_wrapper.get_headers(),
1208
1137
  timeout=60,
1209
1138
  )
1210
1139
  if 200 <= _response.status_code < 300:
@@ -1225,7 +1154,6 @@ class AsyncBetaClient:
1225
1154
  agent_slug: str,
1226
1155
  collection: typing.Optional[str] = OMIT,
1227
1156
  data: typing.Dict[str, typing.Any],
1228
- project_id: typing.Optional[str] = None,
1229
1157
  ) -> AgentData:
1230
1158
  """
1231
1159
  Create new agent data.
@@ -1240,8 +1168,6 @@ class AsyncBetaClient:
1240
1168
  - collection: typing.Optional[str].
1241
1169
 
1242
1170
  - data: typing.Dict[str, typing.Any].
1243
-
1244
- - project_id: typing.Optional[str].
1245
1171
  ---
1246
1172
  from llama_cloud.client import AsyncLlamaCloud
1247
1173
 
@@ -1261,7 +1187,7 @@ class AsyncBetaClient:
1261
1187
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data"),
1262
1188
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1263
1189
  json=jsonable_encoder(_request),
1264
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1190
+ headers=self._client_wrapper.get_headers(),
1265
1191
  timeout=60,
1266
1192
  )
1267
1193
  if 200 <= _response.status_code < 300:
@@ -1287,7 +1213,6 @@ class AsyncBetaClient:
1287
1213
  collection: typing.Optional[str] = OMIT,
1288
1214
  include_total: typing.Optional[bool] = OMIT,
1289
1215
  offset: typing.Optional[int] = OMIT,
1290
- project_id: typing.Optional[str] = None,
1291
1216
  ) -> PaginatedResponseAgentData:
1292
1217
  """
1293
1218
  Search agent data with filtering, sorting, and pagination.
@@ -1312,8 +1237,6 @@ class AsyncBetaClient:
1312
1237
  - include_total: typing.Optional[bool]. Whether to include the total number of items in the response
1313
1238
 
1314
1239
  - offset: typing.Optional[int].
1315
-
1316
- - project_id: typing.Optional[str].
1317
1240
  ---
1318
1241
  from llama_cloud.client import AsyncLlamaCloud
1319
1242
 
@@ -1344,7 +1267,7 @@ class AsyncBetaClient:
1344
1267
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:search"),
1345
1268
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1346
1269
  json=jsonable_encoder(_request),
1347
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1270
+ headers=self._client_wrapper.get_headers(),
1348
1271
  timeout=60,
1349
1272
  )
1350
1273
  if 200 <= _response.status_code < 300:
@@ -1372,7 +1295,6 @@ class AsyncBetaClient:
1372
1295
  count: typing.Optional[bool] = OMIT,
1373
1296
  first: typing.Optional[bool] = OMIT,
1374
1297
  offset: typing.Optional[int] = OMIT,
1375
- project_id: typing.Optional[str] = None,
1376
1298
  ) -> PaginatedResponseAggregateGroup:
1377
1299
  """
1378
1300
  Aggregate agent data with grouping and optional counting/first item retrieval.
@@ -1401,8 +1323,6 @@ class AsyncBetaClient:
1401
1323
  - first: typing.Optional[bool].
1402
1324
 
1403
1325
  - offset: typing.Optional[int].
1404
-
1405
- - project_id: typing.Optional[str].
1406
1326
  ---
1407
1327
  from llama_cloud.client import AsyncLlamaCloud
1408
1328
 
@@ -1437,7 +1357,7 @@ class AsyncBetaClient:
1437
1357
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:aggregate"),
1438
1358
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1439
1359
  json=jsonable_encoder(_request),
1440
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1360
+ headers=self._client_wrapper.get_headers(),
1441
1361
  timeout=60,
1442
1362
  )
1443
1363
  if 200 <= _response.status_code < 300:
@@ -1505,7 +1425,6 @@ class AsyncBetaClient:
1505
1425
  project_id: typing.Optional[str] = None,
1506
1426
  organization_id: typing.Optional[str] = None,
1507
1427
  request: FileCreate,
1508
- project_id: typing.Optional[str] = None,
1509
1428
  ) -> File:
1510
1429
  """
1511
1430
  Create a new file in the project.
@@ -1524,8 +1443,6 @@ class AsyncBetaClient:
1524
1443
  - organization_id: typing.Optional[str].
1525
1444
 
1526
1445
  - request: FileCreate.
1527
-
1528
- - project_id: typing.Optional[str].
1529
1446
  ---
1530
1447
  from llama_cloud import FileCreate
1531
1448
  from llama_cloud.client import AsyncLlamaCloud
@@ -1544,7 +1461,7 @@ class AsyncBetaClient:
1544
1461
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
1545
1462
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1546
1463
  json=jsonable_encoder(request),
1547
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1464
+ headers=self._client_wrapper.get_headers(),
1548
1465
  timeout=60,
1549
1466
  )
1550
1467
  if 200 <= _response.status_code < 300:
@@ -1563,7 +1480,6 @@ class AsyncBetaClient:
1563
1480
  project_id: typing.Optional[str] = None,
1564
1481
  organization_id: typing.Optional[str] = None,
1565
1482
  request: FileCreate,
1566
- project_id: typing.Optional[str] = None,
1567
1483
  ) -> File:
1568
1484
  """
1569
1485
  Upsert a file (create or update if exists) in the project.
@@ -1582,8 +1498,6 @@ class AsyncBetaClient:
1582
1498
  - organization_id: typing.Optional[str].
1583
1499
 
1584
1500
  - request: FileCreate.
1585
-
1586
- - project_id: typing.Optional[str].
1587
1501
  ---
1588
1502
  from llama_cloud import FileCreate
1589
1503
  from llama_cloud.client import AsyncLlamaCloud
@@ -1602,7 +1516,7 @@ class AsyncBetaClient:
1602
1516
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
1603
1517
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1604
1518
  json=jsonable_encoder(request),
1605
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1519
+ headers=self._client_wrapper.get_headers(),
1606
1520
  timeout=60,
1607
1521
  )
1608
1522
  if 200 <= _response.status_code < 300:
@@ -1624,7 +1538,6 @@ class AsyncBetaClient:
1624
1538
  page_token: typing.Optional[str] = OMIT,
1625
1539
  filter: typing.Optional[FileFilter] = OMIT,
1626
1540
  order_by: typing.Optional[str] = OMIT,
1627
- project_id: typing.Optional[str] = None,
1628
1541
  ) -> FileQueryResponse:
1629
1542
  """
1630
1543
  Query files with flexible filtering and pagination.
@@ -1649,8 +1562,6 @@ class AsyncBetaClient:
1649
1562
  - filter: typing.Optional[FileFilter].
1650
1563
 
1651
1564
  - order_by: typing.Optional[str].
1652
-
1653
- - project_id: typing.Optional[str].
1654
1565
  ---
1655
1566
  from llama_cloud import FileFilter
1656
1567
  from llama_cloud.client import AsyncLlamaCloud
@@ -1676,7 +1587,7 @@ class AsyncBetaClient:
1676
1587
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files/query"),
1677
1588
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1678
1589
  json=jsonable_encoder(_request),
1679
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1590
+ headers=self._client_wrapper.get_headers(),
1680
1591
  timeout=60,
1681
1592
  )
1682
1593
  if 200 <= _response.status_code < 300:
@@ -1690,12 +1601,7 @@ class AsyncBetaClient:
1690
1601
  raise ApiError(status_code=_response.status_code, body=_response_json)
1691
1602
 
1692
1603
  async def delete_file(
1693
- self,
1694
- file_id: str,
1695
- *,
1696
- project_id: typing.Optional[str] = None,
1697
- organization_id: typing.Optional[str] = None,
1698
- project_id: typing.Optional[str] = None,
1604
+ self, file_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1699
1605
  ) -> None:
1700
1606
  """
1701
1607
  Delete a single file from the project.
@@ -1714,8 +1620,6 @@ class AsyncBetaClient:
1714
1620
  - project_id: typing.Optional[str].
1715
1621
 
1716
1622
  - organization_id: typing.Optional[str].
1717
-
1718
- - project_id: typing.Optional[str].
1719
1623
  ---
1720
1624
  from llama_cloud.client import AsyncLlamaCloud
1721
1625
 
@@ -1730,7 +1634,7 @@ class AsyncBetaClient:
1730
1634
  "DELETE",
1731
1635
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/files/{file_id}"),
1732
1636
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1733
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1637
+ headers=self._client_wrapper.get_headers(),
1734
1638
  timeout=60,
1735
1639
  )
1736
1640
  if 200 <= _response.status_code < 300: