llama-cloud 0.1.37__py3-none-any.whl → 0.1.38__py3-none-any.whl

This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registry.

This version of llama-cloud has been flagged as a potentially problematic release.

Files changed (37)
  1. llama_cloud/__init__.py +8 -2
  2. llama_cloud/client.py +3 -0
  3. llama_cloud/resources/__init__.py +2 -0
  4. llama_cloud/resources/alpha/__init__.py +2 -0
  5. llama_cloud/resources/alpha/client.py +118 -0
  6. llama_cloud/resources/beta/client.py +126 -30
  7. llama_cloud/resources/chat_apps/client.py +32 -8
  8. llama_cloud/resources/classifier/client.py +139 -11
  9. llama_cloud/resources/data_sinks/client.py +32 -8
  10. llama_cloud/resources/data_sources/client.py +32 -8
  11. llama_cloud/resources/data_sources/types/data_source_update_component.py +2 -0
  12. llama_cloud/resources/embedding_model_configs/client.py +48 -12
  13. llama_cloud/resources/files/client.py +176 -42
  14. llama_cloud/resources/jobs/client.py +12 -6
  15. llama_cloud/resources/llama_extract/client.py +138 -32
  16. llama_cloud/resources/organizations/client.py +18 -4
  17. llama_cloud/resources/parsing/client.py +16 -4
  18. llama_cloud/resources/pipelines/client.py +32 -8
  19. llama_cloud/resources/projects/client.py +78 -18
  20. llama_cloud/resources/reports/client.py +126 -30
  21. llama_cloud/resources/retrievers/client.py +48 -12
  22. llama_cloud/types/__init__.py +6 -2
  23. llama_cloud/types/agent_deployment_summary.py +1 -0
  24. llama_cloud/types/classify_job.py +2 -0
  25. llama_cloud/types/cloud_jira_data_source_v_2.py +52 -0
  26. llama_cloud/types/cloud_jira_data_source_v_2_api_version.py +21 -0
  27. llama_cloud/types/configurable_data_source_names.py +4 -0
  28. llama_cloud/types/data_source_component.py +2 -0
  29. llama_cloud/types/data_source_create_component.py +2 -0
  30. llama_cloud/types/data_source_reader_version_metadata_reader_version.py +9 -1
  31. llama_cloud/types/{classify_job_with_status.py → paginated_response_classify_job.py} +5 -18
  32. llama_cloud/types/pipeline_data_source_component.py +2 -0
  33. llama_cloud/types/usage_response_active_alerts_item.py +4 -0
  34. {llama_cloud-0.1.37.dist-info → llama_cloud-0.1.38.dist-info}/METADATA +2 -3
  35. {llama_cloud-0.1.37.dist-info → llama_cloud-0.1.38.dist-info}/RECORD +37 -33
  36. {llama_cloud-0.1.37.dist-info → llama_cloud-0.1.38.dist-info}/WHEEL +1 -1
  37. {llama_cloud-0.1.37.dist-info → llama_cloud-0.1.38.dist-info}/LICENSE +0 -0
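
Nearly every hunk in the reports client diff below follows the same pattern: a public method gains an optional project_id keyword, and the outgoing headers are rebuilt so that a Project-Id header is only sent when project_id is provided. A minimal, self-contained sketch of that header-merging pattern is shown here; remove_none_from_dict is re-implemented locally as a stand-in for the SDK's internal helper of the same name.

import typing

def remove_none_from_dict(original: typing.Dict[str, typing.Any]) -> typing.Dict[str, typing.Any]:
    # Local stand-in for the SDK helper used in the diff: drop keys whose value is None.
    return {key: value for key, value in original.items() if value is not None}

def build_headers(base_headers: typing.Dict[str, str], project_id: typing.Optional[str]) -> typing.Dict[str, str]:
    # Mirrors the changed header lines below: merge the base headers with a
    # "Project-Id" entry, then strip it again if project_id was never supplied.
    return remove_none_from_dict({**base_headers, "Project-Id": project_id})

print(build_headers({"Authorization": "Bearer <token>"}, None))        # no Project-Id header
print(build_headers({"Authorization": "Bearer <token>"}, "proj-123"))  # Project-Id included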
llama_cloud/resources/reports/client.py
@@ -53,6 +53,7 @@ class ReportsClient:
  existing_retriever_id: typing.Optional[str] = OMIT,
  files: typing.List[str],
  template_file: typing.Optional[str] = OMIT,
+ project_id: typing.Optional[str] = None,
  ) -> ReportCreateResponse:
  """
  Create a new report.
@@ -73,6 +74,8 @@ class ReportsClient:
  - files: typing.List[str].

  - template_file: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  """
  _request: typing.Dict[str, typing.Any] = {"name": name, "template_text": template_text, "files": files}
  if template_instructions is not OMIT:
@@ -86,7 +89,7 @@ class ReportsClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/reports"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder(_request),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -107,6 +110,7 @@ class ReportsClient:
  offset: typing.Optional[int] = None,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> PaginatedReportResponse:
  """
  List all reports for a project.
@@ -121,6 +125,8 @@ class ReportsClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud import ReportState
  from llama_cloud.client import LlamaCloud
@@ -144,7 +150,7 @@ class ReportsClient:
  "organization_id": organization_id,
  }
  ),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -164,6 +170,7 @@ class ReportsClient:
  version: typing.Optional[int] = None,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> ReportResponse:
  """
  Get a specific report.
@@ -176,6 +183,8 @@ class ReportsClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -192,7 +201,7 @@ class ReportsClient:
  params=remove_none_from_dict(
  {"version": version, "project_id": project_id, "organization_id": organization_id}
  ),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -212,6 +221,7 @@ class ReportsClient:
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
  name: str,
+ project_id: typing.Optional[str] = None,
  ) -> ReportMetadata:
  """
  Update metadata for a report.
@@ -224,6 +234,8 @@ class ReportsClient:
  - organization_id: typing.Optional[str].

  - name: str. The name of the report
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -240,7 +252,7 @@ class ReportsClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder({"name": name}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -260,6 +272,7 @@ class ReportsClient:
  cascade_delete: typing.Optional[bool] = None,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.Any:
  """
  Delete a report.
@@ -272,6 +285,8 @@ class ReportsClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -288,7 +303,7 @@ class ReportsClient:
  params=remove_none_from_dict(
  {"cascade_delete": cascade_delete, "project_id": project_id, "organization_id": organization_id}
  ),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -308,6 +323,7 @@ class ReportsClient:
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
  content: Report,
+ project_id: typing.Optional[str] = None,
  ) -> ReportResponse:
  """
  Update a report's content.
@@ -320,6 +336,8 @@ class ReportsClient:
  - organization_id: typing.Optional[str].

  - content: Report. The content of the report version
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud import Report
  from llama_cloud.client import LlamaCloud
@@ -339,7 +357,7 @@ class ReportsClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder({"content": content}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -353,7 +371,12 @@ class ReportsClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  def get_report_plan(
- self, report_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ report_id: str,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> ReportPlan:
  """
  Get the plan for a report.
@@ -364,6 +387,8 @@ class ReportsClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -378,7 +403,7 @@ class ReportsClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/plan"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -399,6 +424,7 @@ class ReportsClient:
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
  request: typing.Optional[ReportPlan] = None,
+ project_id: typing.Optional[str] = None,
  ) -> ReportResponse:
  """
  Update the plan of a report, including approval, rejection, and editing.
@@ -413,6 +439,8 @@ class ReportsClient:
  - organization_id: typing.Optional[str].

  - request: typing.Optional[ReportPlan].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud import (
  ReportPlan,
@@ -436,7 +464,7 @@ class ReportsClient:
  {"action": action, "project_id": project_id, "organization_id": organization_id}
  ),
  json=jsonable_encoder(request),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -456,6 +484,7 @@ class ReportsClient:
  after: typing.Optional[dt.datetime] = None,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.List[ReportEventItem]:
  """
  Get all historical events for a report.
@@ -468,6 +497,8 @@ class ReportsClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -488,7 +519,7 @@ class ReportsClient:
  "organization_id": organization_id,
  }
  ),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -502,7 +533,12 @@ class ReportsClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  def get_report_metadata(
- self, report_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ report_id: str,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> ReportMetadata:
  """
  Get metadata for a report.
@@ -513,6 +549,8 @@ class ReportsClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -527,7 +565,7 @@ class ReportsClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/metadata"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -548,6 +586,7 @@ class ReportsClient:
  organization_id: typing.Optional[str] = None,
  user_query: str,
  chat_history: typing.List[LlamaIndexCoreBaseLlmsTypesChatMessage],
+ project_id: typing.Optional[str] = None,
  ) -> typing.List[EditSuggestion]:
  """
  Suggest edits to a report based on user query and chat history.
@@ -562,6 +601,8 @@ class ReportsClient:
  - user_query: str.

  - chat_history: typing.List[LlamaIndexCoreBaseLlmsTypesChatMessage].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -581,7 +622,7 @@ class ReportsClient:
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder({"user_query": user_query, "chat_history": chat_history}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -595,7 +636,12 @@ class ReportsClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  def restart_report(
- self, report_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ report_id: str,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.Any:
  """
  Restart a report from scratch.
@@ -606,6 +652,8 @@ class ReportsClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -620,7 +668,7 @@ class ReportsClient:
  "POST",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/restart"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -649,6 +697,7 @@ class AsyncReportsClient:
  existing_retriever_id: typing.Optional[str] = OMIT,
  files: typing.List[str],
  template_file: typing.Optional[str] = OMIT,
+ project_id: typing.Optional[str] = None,
  ) -> ReportCreateResponse:
  """
  Create a new report.
@@ -669,6 +718,8 @@ class AsyncReportsClient:
  - files: typing.List[str].

  - template_file: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  """
  _request: typing.Dict[str, typing.Any] = {"name": name, "template_text": template_text, "files": files}
  if template_instructions is not OMIT:
@@ -682,7 +733,7 @@ class AsyncReportsClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/reports"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder(_request),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -703,6 +754,7 @@ class AsyncReportsClient:
  offset: typing.Optional[int] = None,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> PaginatedReportResponse:
  """
  List all reports for a project.
@@ -717,6 +769,8 @@ class AsyncReportsClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud import ReportState
  from llama_cloud.client import AsyncLlamaCloud
@@ -740,7 +794,7 @@ class AsyncReportsClient:
  "organization_id": organization_id,
  }
  ),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -760,6 +814,7 @@ class AsyncReportsClient:
  version: typing.Optional[int] = None,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> ReportResponse:
  """
  Get a specific report.
@@ -772,6 +827,8 @@ class AsyncReportsClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -788,7 +845,7 @@ class AsyncReportsClient:
  params=remove_none_from_dict(
  {"version": version, "project_id": project_id, "organization_id": organization_id}
  ),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -808,6 +865,7 @@ class AsyncReportsClient:
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
  name: str,
+ project_id: typing.Optional[str] = None,
  ) -> ReportMetadata:
  """
  Update metadata for a report.
@@ -820,6 +878,8 @@ class AsyncReportsClient:
  - organization_id: typing.Optional[str].

  - name: str. The name of the report
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -836,7 +896,7 @@ class AsyncReportsClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder({"name": name}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -856,6 +916,7 @@ class AsyncReportsClient:
  cascade_delete: typing.Optional[bool] = None,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.Any:
  """
  Delete a report.
@@ -868,6 +929,8 @@ class AsyncReportsClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -884,7 +947,7 @@ class AsyncReportsClient:
  params=remove_none_from_dict(
  {"cascade_delete": cascade_delete, "project_id": project_id, "organization_id": organization_id}
  ),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -904,6 +967,7 @@ class AsyncReportsClient:
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
  content: Report,
+ project_id: typing.Optional[str] = None,
  ) -> ReportResponse:
  """
  Update a report's content.
@@ -916,6 +980,8 @@ class AsyncReportsClient:
  - organization_id: typing.Optional[str].

  - content: Report. The content of the report version
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud import Report
  from llama_cloud.client import AsyncLlamaCloud
@@ -935,7 +1001,7 @@ class AsyncReportsClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder({"content": content}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -949,7 +1015,12 @@ class AsyncReportsClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def get_report_plan(
- self, report_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ report_id: str,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> ReportPlan:
  """
  Get the plan for a report.
@@ -960,6 +1031,8 @@ class AsyncReportsClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -974,7 +1047,7 @@ class AsyncReportsClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/plan"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -995,6 +1068,7 @@ class AsyncReportsClient:
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
  request: typing.Optional[ReportPlan] = None,
+ project_id: typing.Optional[str] = None,
  ) -> ReportResponse:
  """
  Update the plan of a report, including approval, rejection, and editing.
@@ -1009,6 +1083,8 @@ class AsyncReportsClient:
  - organization_id: typing.Optional[str].

  - request: typing.Optional[ReportPlan].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud import (
  ReportPlan,
@@ -1032,7 +1108,7 @@ class AsyncReportsClient:
  {"action": action, "project_id": project_id, "organization_id": organization_id}
  ),
  json=jsonable_encoder(request),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1052,6 +1128,7 @@ class AsyncReportsClient:
  after: typing.Optional[dt.datetime] = None,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.List[ReportEventItem]:
  """
  Get all historical events for a report.
@@ -1064,6 +1141,8 @@ class AsyncReportsClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1084,7 +1163,7 @@ class AsyncReportsClient:
  "organization_id": organization_id,
  }
  ),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1098,7 +1177,12 @@ class AsyncReportsClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def get_report_metadata(
- self, report_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ report_id: str,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> ReportMetadata:
  """
  Get metadata for a report.
@@ -1109,6 +1193,8 @@ class AsyncReportsClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1123,7 +1209,7 @@ class AsyncReportsClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/metadata"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1144,6 +1230,7 @@ class AsyncReportsClient:
  organization_id: typing.Optional[str] = None,
  user_query: str,
  chat_history: typing.List[LlamaIndexCoreBaseLlmsTypesChatMessage],
+ project_id: typing.Optional[str] = None,
  ) -> typing.List[EditSuggestion]:
  """
  Suggest edits to a report based on user query and chat history.
@@ -1158,6 +1245,8 @@ class AsyncReportsClient:
  - user_query: str.

  - chat_history: typing.List[LlamaIndexCoreBaseLlmsTypesChatMessage].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1177,7 +1266,7 @@ class AsyncReportsClient:
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder({"user_query": user_query, "chat_history": chat_history}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1191,7 +1280,12 @@ class AsyncReportsClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def restart_report(
- self, report_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ report_id: str,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.Any:
  """
  Restart a report from scratch.
@@ -1202,6 +1296,8 @@ class AsyncReportsClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1216,7 +1312,7 @@ class AsyncReportsClient:
  "POST",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/reports/{report_id}/restart"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
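
Taken together, the hunks above suggest that in 0.1.38 the reports endpoints accept an optional project_id that is forwarded both as a query parameter and as the new Project-Id header. A rough usage sketch follows, assuming the synchronous client exposes the reports methods under client.reports as in earlier releases; the token, report id, and project id values are placeholders, not taken from this diff.

from llama_cloud.client import LlamaCloud

# Placeholder credential and ids, for illustration only.
client = LlamaCloud(token="YOUR_API_KEY")

# Passing project_id scopes the call to a project; per the diff above it is also
# propagated as a "Project-Id" request header. Omitting it leaves the header out.
metadata = client.reports.get_report_metadata(
    report_id="YOUR_REPORT_ID",
    project_id="YOUR_PROJECT_ID",
)
print(metadata)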