llama-cloud 0.1.38__py3-none-any.whl → 0.1.39__py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.

@@ -35,12 +35,7 @@ class FilesClient:
  self._client_wrapper = client_wrapper

  def get_file(
- self,
- id: str,
- *,
- project_id: typing.Optional[str] = None,
- organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
+ self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
  ) -> File:
  """
  Read File metadata objects.
@@ -51,8 +46,6 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -67,7 +60,7 @@ class FilesClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -81,12 +74,7 @@ class FilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  def delete_file(
- self,
- id: str,
- *,
- project_id: typing.Optional[str] = None,
- organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
+ self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
  ) -> None:
  """
  Delete the file from S3.
@@ -97,8 +85,6 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -113,7 +99,7 @@ class FilesClient:
  "DELETE",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -127,11 +113,7 @@ class FilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  def list_files(
- self,
- *,
- project_id: typing.Optional[str] = None,
- organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
+ self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
  ) -> typing.List[File]:
  """
  Read File metadata objects.
@@ -140,8 +122,6 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -154,7 +134,7 @@ class FilesClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -174,7 +154,6 @@ class FilesClient:
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
  upload_file: typing.IO,
- project_id: typing.Optional[str] = None,
  ) -> File:
  """
  Upload a file to S3.
@@ -187,8 +166,6 @@ class FilesClient:
  - organization_id: typing.Optional[str].

  - upload_file: typing.IO.
-
- - project_id: typing.Optional[str].
  """
  _response = self._client_wrapper.httpx_client.request(
  "POST",
@@ -198,7 +175,7 @@ class FilesClient:
  ),
  data=jsonable_encoder({}),
  files={"upload_file": upload_file},
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -218,7 +195,6 @@ class FilesClient:
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
  request: FileCreate,
- project_id: typing.Optional[str] = None,
  ) -> FileIdPresignedUrl:
  """
  Create a presigned url for uploading a file.
@@ -234,8 +210,6 @@ class FilesClient:
  - organization_id: typing.Optional[str].

  - request: FileCreate.
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud import FileCreate
  from llama_cloud.client import LlamaCloud
@@ -256,7 +230,7 @@ class FilesClient:
  {"expires_at_seconds": expires_at_seconds, "project_id": project_id, "organization_id": organization_id}
  ),
  json=jsonable_encoder(request),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -270,11 +244,7 @@ class FilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  def sync_files(
- self,
- *,
- project_id: typing.Optional[str] = None,
- organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
+ self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
  ) -> typing.List[File]:
  """
  Sync Files API against file contents uploaded via S3 presigned urls.
@@ -283,8 +253,6 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -297,7 +265,7 @@ class FilesClient:
  "PUT",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/sync"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -322,7 +290,6 @@ class FilesClient:
  verify_ssl: typing.Optional[bool] = OMIT,
  follow_redirects: typing.Optional[bool] = OMIT,
  resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]] = OMIT,
- project_id: typing.Optional[str] = None,
  ) -> File:
  """
  Upload a file to the project from a URL.
@@ -352,8 +319,6 @@ class FilesClient:
  - follow_redirects: typing.Optional[bool]. Whether to follow redirects when downloading the file

  - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -382,7 +347,7 @@ class FilesClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/upload_from_url"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder(_request),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -402,7 +367,6 @@ class FilesClient:
  expires_at_seconds: typing.Optional[int] = None,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
  ) -> PresignedUrl:
  """
  Returns a presigned url to read the file content.
@@ -415,8 +379,6 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -433,7 +395,7 @@ class FilesClient:
  params=remove_none_from_dict(
  {"expires_at_seconds": expires_at_seconds, "project_id": project_id, "organization_id": organization_id}
  ),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -447,12 +409,7 @@ class FilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  def list_file_page_screenshots(
- self,
- id: str,
- *,
- project_id: typing.Optional[str] = None,
- organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
+ self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
  ) -> typing.List[PageScreenshotMetadata]:
  """
  List metadata for all screenshots of pages from a file.
@@ -463,8 +420,6 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -479,7 +434,7 @@ class FilesClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -499,7 +454,6 @@ class FilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
  ) -> typing.Any:
  """
  Get screenshot of a page from a file.
@@ -512,8 +466,6 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -531,7 +483,7 @@ class FilesClient:
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots/{page_index}"
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -545,12 +497,7 @@ class FilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  def list_file_pages_figures(
- self,
- id: str,
- *,
- project_id: typing.Optional[str] = None,
- organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
+ self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
  ) -> typing.List[PageFigureMetadata]:
  """
  Parameters:
@@ -559,8 +506,6 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -575,7 +520,7 @@ class FilesClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -595,7 +540,6 @@ class FilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
  ) -> typing.List[PageFigureMetadata]:
  """
  Parameters:
@@ -606,8 +550,6 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -625,7 +567,7 @@ class FilesClient:
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures/{page_index}"
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -646,7 +588,6 @@ class FilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
  ) -> typing.Any:
  """
  Parameters:
@@ -659,8 +600,6 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -679,7 +618,7 @@ class FilesClient:
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures/{page_index}/{figure_name}"
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -699,7 +638,6 @@ class FilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
  ) -> PresignedUrl:
  """
  Returns a presigned url to read a page screenshot.
@@ -715,8 +653,6 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -735,7 +671,7 @@ class FilesClient:
  f"api/v1/files/{id}/page_screenshots/{page_index}/presigned_url",
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -756,7 +692,6 @@ class FilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
  ) -> PresignedUrl:
  """
  Returns a presigned url to read a page figure.
@@ -774,8 +709,6 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -795,7 +728,7 @@ class FilesClient:
  f"api/v1/files/{id}/page-figures/{page_index}/{figure_name}/presigned_url",
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -814,12 +747,7 @@ class AsyncFilesClient:
  self._client_wrapper = client_wrapper

  async def get_file(
- self,
- id: str,
- *,
- project_id: typing.Optional[str] = None,
- organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
+ self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
  ) -> File:
  """
  Read File metadata objects.
@@ -830,8 +758,6 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -846,7 +772,7 @@ class AsyncFilesClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -860,12 +786,7 @@ class AsyncFilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def delete_file(
- self,
- id: str,
- *,
- project_id: typing.Optional[str] = None,
- organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
+ self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
  ) -> None:
  """
  Delete the file from S3.
@@ -876,8 +797,6 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -892,7 +811,7 @@ class AsyncFilesClient:
  "DELETE",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -906,11 +825,7 @@ class AsyncFilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def list_files(
- self,
- *,
- project_id: typing.Optional[str] = None,
- organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
+ self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
  ) -> typing.List[File]:
  """
  Read File metadata objects.
@@ -919,8 +834,6 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -933,7 +846,7 @@ class AsyncFilesClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -953,7 +866,6 @@ class AsyncFilesClient:
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
  upload_file: typing.IO,
- project_id: typing.Optional[str] = None,
  ) -> File:
  """
  Upload a file to S3.
@@ -966,8 +878,6 @@ class AsyncFilesClient:
  - organization_id: typing.Optional[str].

  - upload_file: typing.IO.
-
- - project_id: typing.Optional[str].
  """
  _response = await self._client_wrapper.httpx_client.request(
  "POST",
@@ -977,7 +887,7 @@ class AsyncFilesClient:
  ),
  data=jsonable_encoder({}),
  files={"upload_file": upload_file},
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -997,7 +907,6 @@ class AsyncFilesClient:
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
  request: FileCreate,
- project_id: typing.Optional[str] = None,
  ) -> FileIdPresignedUrl:
  """
  Create a presigned url for uploading a file.
@@ -1013,8 +922,6 @@ class AsyncFilesClient:
  - organization_id: typing.Optional[str].

  - request: FileCreate.
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud import FileCreate
  from llama_cloud.client import AsyncLlamaCloud
@@ -1035,7 +942,7 @@ class AsyncFilesClient:
  {"expires_at_seconds": expires_at_seconds, "project_id": project_id, "organization_id": organization_id}
  ),
  json=jsonable_encoder(request),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1049,11 +956,7 @@ class AsyncFilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def sync_files(
- self,
- *,
- project_id: typing.Optional[str] = None,
- organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
+ self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
  ) -> typing.List[File]:
  """
  Sync Files API against file contents uploaded via S3 presigned urls.
@@ -1062,8 +965,6 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1076,7 +977,7 @@ class AsyncFilesClient:
  "PUT",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/sync"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1101,7 +1002,6 @@ class AsyncFilesClient:
  verify_ssl: typing.Optional[bool] = OMIT,
  follow_redirects: typing.Optional[bool] = OMIT,
  resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]] = OMIT,
- project_id: typing.Optional[str] = None,
  ) -> File:
  """
  Upload a file to the project from a URL.
@@ -1131,8 +1031,6 @@ class AsyncFilesClient:
  - follow_redirects: typing.Optional[bool]. Whether to follow redirects when downloading the file

  - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1161,7 +1059,7 @@ class AsyncFilesClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/upload_from_url"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder(_request),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1181,7 +1079,6 @@ class AsyncFilesClient:
  expires_at_seconds: typing.Optional[int] = None,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
  ) -> PresignedUrl:
  """
  Returns a presigned url to read the file content.
@@ -1194,8 +1091,6 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1212,7 +1107,7 @@ class AsyncFilesClient:
  params=remove_none_from_dict(
  {"expires_at_seconds": expires_at_seconds, "project_id": project_id, "organization_id": organization_id}
  ),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1226,12 +1121,7 @@ class AsyncFilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def list_file_page_screenshots(
- self,
- id: str,
- *,
- project_id: typing.Optional[str] = None,
- organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
+ self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
  ) -> typing.List[PageScreenshotMetadata]:
  """
  List metadata for all screenshots of pages from a file.
@@ -1242,8 +1132,6 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1258,7 +1146,7 @@ class AsyncFilesClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1278,7 +1166,6 @@ class AsyncFilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
  ) -> typing.Any:
  """
  Get screenshot of a page from a file.
@@ -1291,8 +1178,6 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1310,7 +1195,7 @@ class AsyncFilesClient:
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots/{page_index}"
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1324,12 +1209,7 @@ class AsyncFilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def list_file_pages_figures(
- self,
- id: str,
- *,
- project_id: typing.Optional[str] = None,
- organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
+ self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
  ) -> typing.List[PageFigureMetadata]:
  """
  Parameters:
@@ -1338,8 +1218,6 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1354,7 +1232,7 @@ class AsyncFilesClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1374,7 +1252,6 @@ class AsyncFilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
  ) -> typing.List[PageFigureMetadata]:
  """
  Parameters:
@@ -1385,8 +1262,6 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1404,7 +1279,7 @@ class AsyncFilesClient:
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures/{page_index}"
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1425,7 +1300,6 @@ class AsyncFilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
  ) -> typing.Any:
  """
  Parameters:
@@ -1438,8 +1312,6 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1458,7 +1330,7 @@ class AsyncFilesClient:
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures/{page_index}/{figure_name}"
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1478,7 +1350,6 @@ class AsyncFilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
  ) -> PresignedUrl:
  """
  Returns a presigned url to read a page screenshot.
@@ -1494,8 +1365,6 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1514,7 +1383,7 @@ class AsyncFilesClient:
  f"api/v1/files/{id}/page_screenshots/{page_index}/presigned_url",
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1535,7 +1404,6 @@ class AsyncFilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- project_id: typing.Optional[str] = None,
  ) -> PresignedUrl:
  """
  Returns a presigned url to read a page figure.
@@ -1553,8 +1421,6 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
-
- - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1574,7 +1440,7 @@ class AsyncFilesClient:
  f"api/v1/files/{id}/page-figures/{page_index}/{figure_name}/presigned_url",
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+ headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
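
Every hunk above applies the same change to the generated FilesClient and AsyncFilesClient: the duplicated project_id keyword parameter is removed from each method signature, and the Project-Id request header is dropped, so project_id is now sent only as a query parameter. Below is a minimal, hypothetical sketch of calling the updated 0.1.39 client based on the signatures and import paths shown in the diff's docstrings; the token constructor argument, IDs, and other values are placeholders and are not taken from this release.

import asyncio

from llama_cloud.client import AsyncLlamaCloud, LlamaCloud

# Placeholder credentials; the `token` argument name is an assumption here,
# check the client constructor in your installed version.
client = LlamaCloud(token="YOUR_API_KEY")

# project_id and organization_id are optional keyword-only arguments and are
# passed through as query parameters; 0.1.39 no longer sends a Project-Id header.
file = client.files.get_file("FILE_ID", project_id="PROJECT_ID", organization_id="ORG_ID")
files = client.files.list_files(project_id="PROJECT_ID")


async def main() -> None:
    # The async client mirrors the same updated signatures.
    aclient = AsyncLlamaCloud(token="YOUR_API_KEY")
    screenshots = await aclient.files.list_file_page_screenshots("FILE_ID", project_id="PROJECT_ID")
    print(len(files), len(screenshots))


asyncio.run(main())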