llama-cloud 0.1.37__py3-none-any.whl → 0.1.38__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of llama-cloud is likely broken: the diff of `llama_cloud/resources/files/client.py` shown below adds a second `project_id: typing.Optional[str] = None` keyword parameter to signatures that already declare `project_id` (e.g. `FilesClient.get_file`, `delete_file`, `list_files`, `sync_files`, and their async counterparts). A duplicate argument name in a function definition is a compile-time SyntaxError in Python, so this module cannot be imported at all. Other changed resource clients listed above may be affected the same way (their hunks are not shown here).

Files changed (37)
  1. llama_cloud/__init__.py +8 -2
  2. llama_cloud/client.py +3 -0
  3. llama_cloud/resources/__init__.py +2 -0
  4. llama_cloud/resources/alpha/__init__.py +2 -0
  5. llama_cloud/resources/alpha/client.py +118 -0
  6. llama_cloud/resources/beta/client.py +126 -30
  7. llama_cloud/resources/chat_apps/client.py +32 -8
  8. llama_cloud/resources/classifier/client.py +139 -11
  9. llama_cloud/resources/data_sinks/client.py +32 -8
  10. llama_cloud/resources/data_sources/client.py +32 -8
  11. llama_cloud/resources/data_sources/types/data_source_update_component.py +2 -0
  12. llama_cloud/resources/embedding_model_configs/client.py +48 -12
  13. llama_cloud/resources/files/client.py +176 -42
  14. llama_cloud/resources/jobs/client.py +12 -6
  15. llama_cloud/resources/llama_extract/client.py +138 -32
  16. llama_cloud/resources/organizations/client.py +18 -4
  17. llama_cloud/resources/parsing/client.py +16 -4
  18. llama_cloud/resources/pipelines/client.py +32 -8
  19. llama_cloud/resources/projects/client.py +78 -18
  20. llama_cloud/resources/reports/client.py +126 -30
  21. llama_cloud/resources/retrievers/client.py +48 -12
  22. llama_cloud/types/__init__.py +6 -2
  23. llama_cloud/types/agent_deployment_summary.py +1 -0
  24. llama_cloud/types/classify_job.py +2 -0
  25. llama_cloud/types/cloud_jira_data_source_v_2.py +52 -0
  26. llama_cloud/types/cloud_jira_data_source_v_2_api_version.py +21 -0
  27. llama_cloud/types/configurable_data_source_names.py +4 -0
  28. llama_cloud/types/data_source_component.py +2 -0
  29. llama_cloud/types/data_source_create_component.py +2 -0
  30. llama_cloud/types/data_source_reader_version_metadata_reader_version.py +9 -1
  31. llama_cloud/types/{classify_job_with_status.py → paginated_response_classify_job.py} +5 -18
  32. llama_cloud/types/pipeline_data_source_component.py +2 -0
  33. llama_cloud/types/usage_response_active_alerts_item.py +4 -0
  34. {llama_cloud-0.1.37.dist-info → llama_cloud-0.1.38.dist-info}/METADATA +2 -3
  35. {llama_cloud-0.1.37.dist-info → llama_cloud-0.1.38.dist-info}/RECORD +37 -33
  36. {llama_cloud-0.1.37.dist-info → llama_cloud-0.1.38.dist-info}/WHEEL +1 -1
  37. {llama_cloud-0.1.37.dist-info → llama_cloud-0.1.38.dist-info}/LICENSE +0 -0
@@ -35,7 +35,12 @@ class FilesClient:
35
35
  self._client_wrapper = client_wrapper
36
36
 
37
37
  def get_file(
38
- self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
38
+ self,
39
+ id: str,
40
+ *,
41
+ project_id: typing.Optional[str] = None,
42
+ organization_id: typing.Optional[str] = None,
43
+ project_id: typing.Optional[str] = None,
39
44
  ) -> File:
40
45
  """
41
46
  Read File metadata objects.
@@ -46,6 +51,8 @@ class FilesClient:
46
51
  - project_id: typing.Optional[str].
47
52
 
48
53
  - organization_id: typing.Optional[str].
54
+
55
+ - project_id: typing.Optional[str].
49
56
  ---
50
57
  from llama_cloud.client import LlamaCloud
51
58
 
@@ -60,7 +67,7 @@ class FilesClient:
60
67
  "GET",
61
68
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
62
69
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
63
- headers=self._client_wrapper.get_headers(),
70
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
64
71
  timeout=60,
65
72
  )
66
73
  if 200 <= _response.status_code < 300:
@@ -74,7 +81,12 @@ class FilesClient:
74
81
  raise ApiError(status_code=_response.status_code, body=_response_json)
75
82
 
76
83
  def delete_file(
77
- self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
84
+ self,
85
+ id: str,
86
+ *,
87
+ project_id: typing.Optional[str] = None,
88
+ organization_id: typing.Optional[str] = None,
89
+ project_id: typing.Optional[str] = None,
78
90
  ) -> None:
79
91
  """
80
92
  Delete the file from S3.
@@ -85,6 +97,8 @@ class FilesClient:
85
97
  - project_id: typing.Optional[str].
86
98
 
87
99
  - organization_id: typing.Optional[str].
100
+
101
+ - project_id: typing.Optional[str].
88
102
  ---
89
103
  from llama_cloud.client import LlamaCloud
90
104
 
@@ -99,7 +113,7 @@ class FilesClient:
99
113
  "DELETE",
100
114
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
101
115
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
102
- headers=self._client_wrapper.get_headers(),
116
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
103
117
  timeout=60,
104
118
  )
105
119
  if 200 <= _response.status_code < 300:
@@ -113,7 +127,11 @@ class FilesClient:
113
127
  raise ApiError(status_code=_response.status_code, body=_response_json)
114
128
 
115
129
  def list_files(
116
- self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
130
+ self,
131
+ *,
132
+ project_id: typing.Optional[str] = None,
133
+ organization_id: typing.Optional[str] = None,
134
+ project_id: typing.Optional[str] = None,
117
135
  ) -> typing.List[File]:
118
136
  """
119
137
  Read File metadata objects.
@@ -122,6 +140,8 @@ class FilesClient:
122
140
  - project_id: typing.Optional[str].
123
141
 
124
142
  - organization_id: typing.Optional[str].
143
+
144
+ - project_id: typing.Optional[str].
125
145
  ---
126
146
  from llama_cloud.client import LlamaCloud
127
147
 
@@ -134,7 +154,7 @@ class FilesClient:
134
154
  "GET",
135
155
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
136
156
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
137
- headers=self._client_wrapper.get_headers(),
157
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
138
158
  timeout=60,
139
159
  )
140
160
  if 200 <= _response.status_code < 300:
@@ -154,6 +174,7 @@ class FilesClient:
154
174
  project_id: typing.Optional[str] = None,
155
175
  organization_id: typing.Optional[str] = None,
156
176
  upload_file: typing.IO,
177
+ project_id: typing.Optional[str] = None,
157
178
  ) -> File:
158
179
  """
159
180
  Upload a file to S3.
@@ -166,6 +187,8 @@ class FilesClient:
166
187
  - organization_id: typing.Optional[str].
167
188
 
168
189
  - upload_file: typing.IO.
190
+
191
+ - project_id: typing.Optional[str].
169
192
  """
170
193
  _response = self._client_wrapper.httpx_client.request(
171
194
  "POST",
@@ -175,7 +198,7 @@ class FilesClient:
175
198
  ),
176
199
  data=jsonable_encoder({}),
177
200
  files={"upload_file": upload_file},
178
- headers=self._client_wrapper.get_headers(),
201
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
179
202
  timeout=60,
180
203
  )
181
204
  if 200 <= _response.status_code < 300:
@@ -195,6 +218,7 @@ class FilesClient:
195
218
  project_id: typing.Optional[str] = None,
196
219
  organization_id: typing.Optional[str] = None,
197
220
  request: FileCreate,
221
+ project_id: typing.Optional[str] = None,
198
222
  ) -> FileIdPresignedUrl:
199
223
  """
200
224
  Create a presigned url for uploading a file.
@@ -210,6 +234,8 @@ class FilesClient:
210
234
  - organization_id: typing.Optional[str].
211
235
 
212
236
  - request: FileCreate.
237
+
238
+ - project_id: typing.Optional[str].
213
239
  ---
214
240
  from llama_cloud import FileCreate
215
241
  from llama_cloud.client import LlamaCloud
@@ -230,7 +256,7 @@ class FilesClient:
230
256
  {"expires_at_seconds": expires_at_seconds, "project_id": project_id, "organization_id": organization_id}
231
257
  ),
232
258
  json=jsonable_encoder(request),
233
- headers=self._client_wrapper.get_headers(),
259
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
234
260
  timeout=60,
235
261
  )
236
262
  if 200 <= _response.status_code < 300:
@@ -244,7 +270,11 @@ class FilesClient:
244
270
  raise ApiError(status_code=_response.status_code, body=_response_json)
245
271
 
246
272
  def sync_files(
247
- self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
273
+ self,
274
+ *,
275
+ project_id: typing.Optional[str] = None,
276
+ organization_id: typing.Optional[str] = None,
277
+ project_id: typing.Optional[str] = None,
248
278
  ) -> typing.List[File]:
249
279
  """
250
280
  Sync Files API against file contents uploaded via S3 presigned urls.
@@ -253,6 +283,8 @@ class FilesClient:
253
283
  - project_id: typing.Optional[str].
254
284
 
255
285
  - organization_id: typing.Optional[str].
286
+
287
+ - project_id: typing.Optional[str].
256
288
  ---
257
289
  from llama_cloud.client import LlamaCloud
258
290
 
@@ -265,7 +297,7 @@ class FilesClient:
265
297
  "PUT",
266
298
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/sync"),
267
299
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
268
- headers=self._client_wrapper.get_headers(),
300
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
269
301
  timeout=60,
270
302
  )
271
303
  if 200 <= _response.status_code < 300:
@@ -290,6 +322,7 @@ class FilesClient:
290
322
  verify_ssl: typing.Optional[bool] = OMIT,
291
323
  follow_redirects: typing.Optional[bool] = OMIT,
292
324
  resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]] = OMIT,
325
+ project_id: typing.Optional[str] = None,
293
326
  ) -> File:
294
327
  """
295
328
  Upload a file to the project from a URL.
@@ -319,6 +352,8 @@ class FilesClient:
319
352
  - follow_redirects: typing.Optional[bool]. Whether to follow redirects when downloading the file
320
353
 
321
354
  - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]].
355
+
356
+ - project_id: typing.Optional[str].
322
357
  ---
323
358
  from llama_cloud.client import LlamaCloud
324
359
 
@@ -347,7 +382,7 @@ class FilesClient:
347
382
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/upload_from_url"),
348
383
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
349
384
  json=jsonable_encoder(_request),
350
- headers=self._client_wrapper.get_headers(),
385
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
351
386
  timeout=60,
352
387
  )
353
388
  if 200 <= _response.status_code < 300:
@@ -367,6 +402,7 @@ class FilesClient:
367
402
  expires_at_seconds: typing.Optional[int] = None,
368
403
  project_id: typing.Optional[str] = None,
369
404
  organization_id: typing.Optional[str] = None,
405
+ project_id: typing.Optional[str] = None,
370
406
  ) -> PresignedUrl:
371
407
  """
372
408
  Returns a presigned url to read the file content.
@@ -379,6 +415,8 @@ class FilesClient:
379
415
  - project_id: typing.Optional[str].
380
416
 
381
417
  - organization_id: typing.Optional[str].
418
+
419
+ - project_id: typing.Optional[str].
382
420
  ---
383
421
  from llama_cloud.client import LlamaCloud
384
422
 
@@ -395,7 +433,7 @@ class FilesClient:
395
433
  params=remove_none_from_dict(
396
434
  {"expires_at_seconds": expires_at_seconds, "project_id": project_id, "organization_id": organization_id}
397
435
  ),
398
- headers=self._client_wrapper.get_headers(),
436
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
399
437
  timeout=60,
400
438
  )
401
439
  if 200 <= _response.status_code < 300:
@@ -409,7 +447,12 @@ class FilesClient:
409
447
  raise ApiError(status_code=_response.status_code, body=_response_json)
410
448
 
411
449
  def list_file_page_screenshots(
412
- self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
450
+ self,
451
+ id: str,
452
+ *,
453
+ project_id: typing.Optional[str] = None,
454
+ organization_id: typing.Optional[str] = None,
455
+ project_id: typing.Optional[str] = None,
413
456
  ) -> typing.List[PageScreenshotMetadata]:
414
457
  """
415
458
  List metadata for all screenshots of pages from a file.
@@ -420,6 +463,8 @@ class FilesClient:
420
463
  - project_id: typing.Optional[str].
421
464
 
422
465
  - organization_id: typing.Optional[str].
466
+
467
+ - project_id: typing.Optional[str].
423
468
  ---
424
469
  from llama_cloud.client import LlamaCloud
425
470
 
@@ -434,7 +479,7 @@ class FilesClient:
434
479
  "GET",
435
480
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots"),
436
481
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
437
- headers=self._client_wrapper.get_headers(),
482
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
438
483
  timeout=60,
439
484
  )
440
485
  if 200 <= _response.status_code < 300:
@@ -454,6 +499,7 @@ class FilesClient:
454
499
  *,
455
500
  project_id: typing.Optional[str] = None,
456
501
  organization_id: typing.Optional[str] = None,
502
+ project_id: typing.Optional[str] = None,
457
503
  ) -> typing.Any:
458
504
  """
459
505
  Get screenshot of a page from a file.
@@ -466,6 +512,8 @@ class FilesClient:
466
512
  - project_id: typing.Optional[str].
467
513
 
468
514
  - organization_id: typing.Optional[str].
515
+
516
+ - project_id: typing.Optional[str].
469
517
  ---
470
518
  from llama_cloud.client import LlamaCloud
471
519
 
@@ -483,7 +531,7 @@ class FilesClient:
483
531
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots/{page_index}"
484
532
  ),
485
533
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
486
- headers=self._client_wrapper.get_headers(),
534
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
487
535
  timeout=60,
488
536
  )
489
537
  if 200 <= _response.status_code < 300:
@@ -497,7 +545,12 @@ class FilesClient:
497
545
  raise ApiError(status_code=_response.status_code, body=_response_json)
498
546
 
499
547
  def list_file_pages_figures(
500
- self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
548
+ self,
549
+ id: str,
550
+ *,
551
+ project_id: typing.Optional[str] = None,
552
+ organization_id: typing.Optional[str] = None,
553
+ project_id: typing.Optional[str] = None,
501
554
  ) -> typing.List[PageFigureMetadata]:
502
555
  """
503
556
  Parameters:
@@ -506,6 +559,8 @@ class FilesClient:
506
559
  - project_id: typing.Optional[str].
507
560
 
508
561
  - organization_id: typing.Optional[str].
562
+
563
+ - project_id: typing.Optional[str].
509
564
  ---
510
565
  from llama_cloud.client import LlamaCloud
511
566
 
@@ -520,7 +575,7 @@ class FilesClient:
520
575
  "GET",
521
576
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures"),
522
577
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
523
- headers=self._client_wrapper.get_headers(),
578
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
524
579
  timeout=60,
525
580
  )
526
581
  if 200 <= _response.status_code < 300:
@@ -540,6 +595,7 @@ class FilesClient:
540
595
  *,
541
596
  project_id: typing.Optional[str] = None,
542
597
  organization_id: typing.Optional[str] = None,
598
+ project_id: typing.Optional[str] = None,
543
599
  ) -> typing.List[PageFigureMetadata]:
544
600
  """
545
601
  Parameters:
@@ -550,6 +606,8 @@ class FilesClient:
550
606
  - project_id: typing.Optional[str].
551
607
 
552
608
  - organization_id: typing.Optional[str].
609
+
610
+ - project_id: typing.Optional[str].
553
611
  ---
554
612
  from llama_cloud.client import LlamaCloud
555
613
 
@@ -567,7 +625,7 @@ class FilesClient:
567
625
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures/{page_index}"
568
626
  ),
569
627
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
570
- headers=self._client_wrapper.get_headers(),
628
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
571
629
  timeout=60,
572
630
  )
573
631
  if 200 <= _response.status_code < 300:
@@ -588,6 +646,7 @@ class FilesClient:
588
646
  *,
589
647
  project_id: typing.Optional[str] = None,
590
648
  organization_id: typing.Optional[str] = None,
649
+ project_id: typing.Optional[str] = None,
591
650
  ) -> typing.Any:
592
651
  """
593
652
  Parameters:
@@ -600,6 +659,8 @@ class FilesClient:
600
659
  - project_id: typing.Optional[str].
601
660
 
602
661
  - organization_id: typing.Optional[str].
662
+
663
+ - project_id: typing.Optional[str].
603
664
  ---
604
665
  from llama_cloud.client import LlamaCloud
605
666
 
@@ -618,7 +679,7 @@ class FilesClient:
618
679
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures/{page_index}/{figure_name}"
619
680
  ),
620
681
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
621
- headers=self._client_wrapper.get_headers(),
682
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
622
683
  timeout=60,
623
684
  )
624
685
  if 200 <= _response.status_code < 300:
@@ -638,6 +699,7 @@ class FilesClient:
638
699
  *,
639
700
  project_id: typing.Optional[str] = None,
640
701
  organization_id: typing.Optional[str] = None,
702
+ project_id: typing.Optional[str] = None,
641
703
  ) -> PresignedUrl:
642
704
  """
643
705
  Returns a presigned url to read a page screenshot.
@@ -653,6 +715,8 @@ class FilesClient:
653
715
  - project_id: typing.Optional[str].
654
716
 
655
717
  - organization_id: typing.Optional[str].
718
+
719
+ - project_id: typing.Optional[str].
656
720
  ---
657
721
  from llama_cloud.client import LlamaCloud
658
722
 
@@ -671,7 +735,7 @@ class FilesClient:
671
735
  f"api/v1/files/{id}/page_screenshots/{page_index}/presigned_url",
672
736
  ),
673
737
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
674
- headers=self._client_wrapper.get_headers(),
738
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
675
739
  timeout=60,
676
740
  )
677
741
  if 200 <= _response.status_code < 300:
@@ -692,6 +756,7 @@ class FilesClient:
692
756
  *,
693
757
  project_id: typing.Optional[str] = None,
694
758
  organization_id: typing.Optional[str] = None,
759
+ project_id: typing.Optional[str] = None,
695
760
  ) -> PresignedUrl:
696
761
  """
697
762
  Returns a presigned url to read a page figure.
@@ -709,6 +774,8 @@ class FilesClient:
709
774
  - project_id: typing.Optional[str].
710
775
 
711
776
  - organization_id: typing.Optional[str].
777
+
778
+ - project_id: typing.Optional[str].
712
779
  ---
713
780
  from llama_cloud.client import LlamaCloud
714
781
 
@@ -728,7 +795,7 @@ class FilesClient:
728
795
  f"api/v1/files/{id}/page-figures/{page_index}/{figure_name}/presigned_url",
729
796
  ),
730
797
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
731
- headers=self._client_wrapper.get_headers(),
798
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
732
799
  timeout=60,
733
800
  )
734
801
  if 200 <= _response.status_code < 300:
@@ -747,7 +814,12 @@ class AsyncFilesClient:
747
814
  self._client_wrapper = client_wrapper
748
815
 
749
816
  async def get_file(
750
- self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
817
+ self,
818
+ id: str,
819
+ *,
820
+ project_id: typing.Optional[str] = None,
821
+ organization_id: typing.Optional[str] = None,
822
+ project_id: typing.Optional[str] = None,
751
823
  ) -> File:
752
824
  """
753
825
  Read File metadata objects.
@@ -758,6 +830,8 @@ class AsyncFilesClient:
758
830
  - project_id: typing.Optional[str].
759
831
 
760
832
  - organization_id: typing.Optional[str].
833
+
834
+ - project_id: typing.Optional[str].
761
835
  ---
762
836
  from llama_cloud.client import AsyncLlamaCloud
763
837
 
@@ -772,7 +846,7 @@ class AsyncFilesClient:
772
846
  "GET",
773
847
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
774
848
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
775
- headers=self._client_wrapper.get_headers(),
849
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
776
850
  timeout=60,
777
851
  )
778
852
  if 200 <= _response.status_code < 300:
@@ -786,7 +860,12 @@ class AsyncFilesClient:
786
860
  raise ApiError(status_code=_response.status_code, body=_response_json)
787
861
 
788
862
  async def delete_file(
789
- self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
863
+ self,
864
+ id: str,
865
+ *,
866
+ project_id: typing.Optional[str] = None,
867
+ organization_id: typing.Optional[str] = None,
868
+ project_id: typing.Optional[str] = None,
790
869
  ) -> None:
791
870
  """
792
871
  Delete the file from S3.
@@ -797,6 +876,8 @@ class AsyncFilesClient:
797
876
  - project_id: typing.Optional[str].
798
877
 
799
878
  - organization_id: typing.Optional[str].
879
+
880
+ - project_id: typing.Optional[str].
800
881
  ---
801
882
  from llama_cloud.client import AsyncLlamaCloud
802
883
 
@@ -811,7 +892,7 @@ class AsyncFilesClient:
811
892
  "DELETE",
812
893
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
813
894
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
814
- headers=self._client_wrapper.get_headers(),
895
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
815
896
  timeout=60,
816
897
  )
817
898
  if 200 <= _response.status_code < 300:
@@ -825,7 +906,11 @@ class AsyncFilesClient:
825
906
  raise ApiError(status_code=_response.status_code, body=_response_json)
826
907
 
827
908
  async def list_files(
828
- self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
909
+ self,
910
+ *,
911
+ project_id: typing.Optional[str] = None,
912
+ organization_id: typing.Optional[str] = None,
913
+ project_id: typing.Optional[str] = None,
829
914
  ) -> typing.List[File]:
830
915
  """
831
916
  Read File metadata objects.
@@ -834,6 +919,8 @@ class AsyncFilesClient:
834
919
  - project_id: typing.Optional[str].
835
920
 
836
921
  - organization_id: typing.Optional[str].
922
+
923
+ - project_id: typing.Optional[str].
837
924
  ---
838
925
  from llama_cloud.client import AsyncLlamaCloud
839
926
 
@@ -846,7 +933,7 @@ class AsyncFilesClient:
846
933
  "GET",
847
934
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
848
935
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
849
- headers=self._client_wrapper.get_headers(),
936
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
850
937
  timeout=60,
851
938
  )
852
939
  if 200 <= _response.status_code < 300:
@@ -866,6 +953,7 @@ class AsyncFilesClient:
866
953
  project_id: typing.Optional[str] = None,
867
954
  organization_id: typing.Optional[str] = None,
868
955
  upload_file: typing.IO,
956
+ project_id: typing.Optional[str] = None,
869
957
  ) -> File:
870
958
  """
871
959
  Upload a file to S3.
@@ -878,6 +966,8 @@ class AsyncFilesClient:
878
966
  - organization_id: typing.Optional[str].
879
967
 
880
968
  - upload_file: typing.IO.
969
+
970
+ - project_id: typing.Optional[str].
881
971
  """
882
972
  _response = await self._client_wrapper.httpx_client.request(
883
973
  "POST",
@@ -887,7 +977,7 @@ class AsyncFilesClient:
887
977
  ),
888
978
  data=jsonable_encoder({}),
889
979
  files={"upload_file": upload_file},
890
- headers=self._client_wrapper.get_headers(),
980
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
891
981
  timeout=60,
892
982
  )
893
983
  if 200 <= _response.status_code < 300:
@@ -907,6 +997,7 @@ class AsyncFilesClient:
907
997
  project_id: typing.Optional[str] = None,
908
998
  organization_id: typing.Optional[str] = None,
909
999
  request: FileCreate,
1000
+ project_id: typing.Optional[str] = None,
910
1001
  ) -> FileIdPresignedUrl:
911
1002
  """
912
1003
  Create a presigned url for uploading a file.
@@ -922,6 +1013,8 @@ class AsyncFilesClient:
922
1013
  - organization_id: typing.Optional[str].
923
1014
 
924
1015
  - request: FileCreate.
1016
+
1017
+ - project_id: typing.Optional[str].
925
1018
  ---
926
1019
  from llama_cloud import FileCreate
927
1020
  from llama_cloud.client import AsyncLlamaCloud
@@ -942,7 +1035,7 @@ class AsyncFilesClient:
942
1035
  {"expires_at_seconds": expires_at_seconds, "project_id": project_id, "organization_id": organization_id}
943
1036
  ),
944
1037
  json=jsonable_encoder(request),
945
- headers=self._client_wrapper.get_headers(),
1038
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
946
1039
  timeout=60,
947
1040
  )
948
1041
  if 200 <= _response.status_code < 300:
@@ -956,7 +1049,11 @@ class AsyncFilesClient:
956
1049
  raise ApiError(status_code=_response.status_code, body=_response_json)
957
1050
 
958
1051
  async def sync_files(
959
- self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1052
+ self,
1053
+ *,
1054
+ project_id: typing.Optional[str] = None,
1055
+ organization_id: typing.Optional[str] = None,
1056
+ project_id: typing.Optional[str] = None,
960
1057
  ) -> typing.List[File]:
961
1058
  """
962
1059
  Sync Files API against file contents uploaded via S3 presigned urls.
@@ -965,6 +1062,8 @@ class AsyncFilesClient:
965
1062
  - project_id: typing.Optional[str].
966
1063
 
967
1064
  - organization_id: typing.Optional[str].
1065
+
1066
+ - project_id: typing.Optional[str].
968
1067
  ---
969
1068
  from llama_cloud.client import AsyncLlamaCloud
970
1069
 
@@ -977,7 +1076,7 @@ class AsyncFilesClient:
977
1076
  "PUT",
978
1077
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/sync"),
979
1078
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
980
- headers=self._client_wrapper.get_headers(),
1079
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
981
1080
  timeout=60,
982
1081
  )
983
1082
  if 200 <= _response.status_code < 300:
@@ -1002,6 +1101,7 @@ class AsyncFilesClient:
1002
1101
  verify_ssl: typing.Optional[bool] = OMIT,
1003
1102
  follow_redirects: typing.Optional[bool] = OMIT,
1004
1103
  resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]] = OMIT,
1104
+ project_id: typing.Optional[str] = None,
1005
1105
  ) -> File:
1006
1106
  """
1007
1107
  Upload a file to the project from a URL.
@@ -1031,6 +1131,8 @@ class AsyncFilesClient:
1031
1131
  - follow_redirects: typing.Optional[bool]. Whether to follow redirects when downloading the file
1032
1132
 
1033
1133
  - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]].
1134
+
1135
+ - project_id: typing.Optional[str].
1034
1136
  ---
1035
1137
  from llama_cloud.client import AsyncLlamaCloud
1036
1138
 
@@ -1059,7 +1161,7 @@ class AsyncFilesClient:
1059
1161
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/upload_from_url"),
1060
1162
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1061
1163
  json=jsonable_encoder(_request),
1062
- headers=self._client_wrapper.get_headers(),
1164
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1063
1165
  timeout=60,
1064
1166
  )
1065
1167
  if 200 <= _response.status_code < 300:
@@ -1079,6 +1181,7 @@ class AsyncFilesClient:
1079
1181
  expires_at_seconds: typing.Optional[int] = None,
1080
1182
  project_id: typing.Optional[str] = None,
1081
1183
  organization_id: typing.Optional[str] = None,
1184
+ project_id: typing.Optional[str] = None,
1082
1185
  ) -> PresignedUrl:
1083
1186
  """
1084
1187
  Returns a presigned url to read the file content.
@@ -1091,6 +1194,8 @@ class AsyncFilesClient:
1091
1194
  - project_id: typing.Optional[str].
1092
1195
 
1093
1196
  - organization_id: typing.Optional[str].
1197
+
1198
+ - project_id: typing.Optional[str].
1094
1199
  ---
1095
1200
  from llama_cloud.client import AsyncLlamaCloud
1096
1201
 
@@ -1107,7 +1212,7 @@ class AsyncFilesClient:
1107
1212
  params=remove_none_from_dict(
1108
1213
  {"expires_at_seconds": expires_at_seconds, "project_id": project_id, "organization_id": organization_id}
1109
1214
  ),
1110
- headers=self._client_wrapper.get_headers(),
1215
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1111
1216
  timeout=60,
1112
1217
  )
1113
1218
  if 200 <= _response.status_code < 300:
@@ -1121,7 +1226,12 @@ class AsyncFilesClient:
1121
1226
  raise ApiError(status_code=_response.status_code, body=_response_json)
1122
1227
 
1123
1228
  async def list_file_page_screenshots(
1124
- self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1229
+ self,
1230
+ id: str,
1231
+ *,
1232
+ project_id: typing.Optional[str] = None,
1233
+ organization_id: typing.Optional[str] = None,
1234
+ project_id: typing.Optional[str] = None,
1125
1235
  ) -> typing.List[PageScreenshotMetadata]:
1126
1236
  """
1127
1237
  List metadata for all screenshots of pages from a file.
@@ -1132,6 +1242,8 @@ class AsyncFilesClient:
1132
1242
  - project_id: typing.Optional[str].
1133
1243
 
1134
1244
  - organization_id: typing.Optional[str].
1245
+
1246
+ - project_id: typing.Optional[str].
1135
1247
  ---
1136
1248
  from llama_cloud.client import AsyncLlamaCloud
1137
1249
 
@@ -1146,7 +1258,7 @@ class AsyncFilesClient:
1146
1258
  "GET",
1147
1259
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots"),
1148
1260
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1149
- headers=self._client_wrapper.get_headers(),
1261
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1150
1262
  timeout=60,
1151
1263
  )
1152
1264
  if 200 <= _response.status_code < 300:
@@ -1166,6 +1278,7 @@ class AsyncFilesClient:
1166
1278
  *,
1167
1279
  project_id: typing.Optional[str] = None,
1168
1280
  organization_id: typing.Optional[str] = None,
1281
+ project_id: typing.Optional[str] = None,
1169
1282
  ) -> typing.Any:
1170
1283
  """
1171
1284
  Get screenshot of a page from a file.
@@ -1178,6 +1291,8 @@ class AsyncFilesClient:
1178
1291
  - project_id: typing.Optional[str].
1179
1292
 
1180
1293
  - organization_id: typing.Optional[str].
1294
+
1295
+ - project_id: typing.Optional[str].
1181
1296
  ---
1182
1297
  from llama_cloud.client import AsyncLlamaCloud
1183
1298
 
@@ -1195,7 +1310,7 @@ class AsyncFilesClient:
1195
1310
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots/{page_index}"
1196
1311
  ),
1197
1312
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1198
- headers=self._client_wrapper.get_headers(),
1313
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1199
1314
  timeout=60,
1200
1315
  )
1201
1316
  if 200 <= _response.status_code < 300:
@@ -1209,7 +1324,12 @@ class AsyncFilesClient:
1209
1324
  raise ApiError(status_code=_response.status_code, body=_response_json)
1210
1325
 
1211
1326
  async def list_file_pages_figures(
1212
- self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1327
+ self,
1328
+ id: str,
1329
+ *,
1330
+ project_id: typing.Optional[str] = None,
1331
+ organization_id: typing.Optional[str] = None,
1332
+ project_id: typing.Optional[str] = None,
1213
1333
  ) -> typing.List[PageFigureMetadata]:
1214
1334
  """
1215
1335
  Parameters:
@@ -1218,6 +1338,8 @@ class AsyncFilesClient:
1218
1338
  - project_id: typing.Optional[str].
1219
1339
 
1220
1340
  - organization_id: typing.Optional[str].
1341
+
1342
+ - project_id: typing.Optional[str].
1221
1343
  ---
1222
1344
  from llama_cloud.client import AsyncLlamaCloud
1223
1345
 
@@ -1232,7 +1354,7 @@ class AsyncFilesClient:
1232
1354
  "GET",
1233
1355
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures"),
1234
1356
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1235
- headers=self._client_wrapper.get_headers(),
1357
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1236
1358
  timeout=60,
1237
1359
  )
1238
1360
  if 200 <= _response.status_code < 300:
@@ -1252,6 +1374,7 @@ class AsyncFilesClient:
1252
1374
  *,
1253
1375
  project_id: typing.Optional[str] = None,
1254
1376
  organization_id: typing.Optional[str] = None,
1377
+ project_id: typing.Optional[str] = None,
1255
1378
  ) -> typing.List[PageFigureMetadata]:
1256
1379
  """
1257
1380
  Parameters:
@@ -1262,6 +1385,8 @@ class AsyncFilesClient:
1262
1385
  - project_id: typing.Optional[str].
1263
1386
 
1264
1387
  - organization_id: typing.Optional[str].
1388
+
1389
+ - project_id: typing.Optional[str].
1265
1390
  ---
1266
1391
  from llama_cloud.client import AsyncLlamaCloud
1267
1392
 
@@ -1279,7 +1404,7 @@ class AsyncFilesClient:
1279
1404
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures/{page_index}"
1280
1405
  ),
1281
1406
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1282
- headers=self._client_wrapper.get_headers(),
1407
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1283
1408
  timeout=60,
1284
1409
  )
1285
1410
  if 200 <= _response.status_code < 300:
@@ -1300,6 +1425,7 @@ class AsyncFilesClient:
1300
1425
  *,
1301
1426
  project_id: typing.Optional[str] = None,
1302
1427
  organization_id: typing.Optional[str] = None,
1428
+ project_id: typing.Optional[str] = None,
1303
1429
  ) -> typing.Any:
1304
1430
  """
1305
1431
  Parameters:
@@ -1312,6 +1438,8 @@ class AsyncFilesClient:
1312
1438
  - project_id: typing.Optional[str].
1313
1439
 
1314
1440
  - organization_id: typing.Optional[str].
1441
+
1442
+ - project_id: typing.Optional[str].
1315
1443
  ---
1316
1444
  from llama_cloud.client import AsyncLlamaCloud
1317
1445
 
@@ -1330,7 +1458,7 @@ class AsyncFilesClient:
1330
1458
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures/{page_index}/{figure_name}"
1331
1459
  ),
1332
1460
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1333
- headers=self._client_wrapper.get_headers(),
1461
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1334
1462
  timeout=60,
1335
1463
  )
1336
1464
  if 200 <= _response.status_code < 300:
@@ -1350,6 +1478,7 @@ class AsyncFilesClient:
1350
1478
  *,
1351
1479
  project_id: typing.Optional[str] = None,
1352
1480
  organization_id: typing.Optional[str] = None,
1481
+ project_id: typing.Optional[str] = None,
1353
1482
  ) -> PresignedUrl:
1354
1483
  """
1355
1484
  Returns a presigned url to read a page screenshot.
@@ -1365,6 +1494,8 @@ class AsyncFilesClient:
1365
1494
  - project_id: typing.Optional[str].
1366
1495
 
1367
1496
  - organization_id: typing.Optional[str].
1497
+
1498
+ - project_id: typing.Optional[str].
1368
1499
  ---
1369
1500
  from llama_cloud.client import AsyncLlamaCloud
1370
1501
 
@@ -1383,7 +1514,7 @@ class AsyncFilesClient:
1383
1514
  f"api/v1/files/{id}/page_screenshots/{page_index}/presigned_url",
1384
1515
  ),
1385
1516
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1386
- headers=self._client_wrapper.get_headers(),
1517
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1387
1518
  timeout=60,
1388
1519
  )
1389
1520
  if 200 <= _response.status_code < 300:
@@ -1404,6 +1535,7 @@ class AsyncFilesClient:
1404
1535
  *,
1405
1536
  project_id: typing.Optional[str] = None,
1406
1537
  organization_id: typing.Optional[str] = None,
1538
+ project_id: typing.Optional[str] = None,
1407
1539
  ) -> PresignedUrl:
1408
1540
  """
1409
1541
  Returns a presigned url to read a page figure.
@@ -1421,6 +1553,8 @@ class AsyncFilesClient:
1421
1553
  - project_id: typing.Optional[str].
1422
1554
 
1423
1555
  - organization_id: typing.Optional[str].
1556
+
1557
+ - project_id: typing.Optional[str].
1424
1558
  ---
1425
1559
  from llama_cloud.client import AsyncLlamaCloud
1426
1560
 
@@ -1440,7 +1574,7 @@ class AsyncFilesClient:
1440
1574
  f"api/v1/files/{id}/page-figures/{page_index}/{figure_name}/presigned_url",
1441
1575
  ),
1442
1576
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1443
- headers=self._client_wrapper.get_headers(),
1577
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
1444
1578
  timeout=60,
1445
1579
  )
1446
1580
  if 200 <= _response.status_code < 300: