llama-cloud 0.1.15__py3-none-any.whl → 0.1.16__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of llama-cloud might be problematic.
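
For orientation, the hunks below remove the eval dataset, local eval, and PromptMixin endpoints (for example list_datasets_for_project, create_local_eval_set_for_project, list_promptmixin_prompts, delete_prompt_mixin_prompts) from ProjectsClient and AsyncProjectsClient, while the core project methods (list_projects, create_project, upsert_project, get_project, update_existing_project, delete_project, get_project_usage) are kept. A minimal sketch of 0.1.16-compatible usage, based only on the retained methods shown in this diff; the token and project ID are placeholders:

    from llama_cloud import ProjectCreate
    from llama_cloud.client import LlamaCloud

    client = LlamaCloud(
        token="YOUR_TOKEN",  # placeholder token
    )

    # Project CRUD and usage endpoints are retained in 0.1.16.
    project = client.projects.upsert_project(
        request=ProjectCreate(name="my-project"),
    )
    usage = client.projects.get_project_usage("PROJECT_ID")  # placeholder project ID

    # Removed in 0.1.16: calls like these from 0.1.15 code now raise AttributeError.
    # client.projects.list_datasets_for_project("PROJECT_ID")
    # client.projects.list_promptmixin_prompts("PROJECT_ID")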

@@ -9,14 +9,9 @@ from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
9
9
  from ...core.jsonable_encoder import jsonable_encoder
10
10
  from ...core.remove_none_from_dict import remove_none_from_dict
11
11
  from ...errors.unprocessable_entity_error import UnprocessableEntityError
12
- from ...types.eval_dataset import EvalDataset
13
12
  from ...types.http_validation_error import HttpValidationError
14
- from ...types.local_eval import LocalEval
15
- from ...types.local_eval_results import LocalEvalResults
16
- from ...types.local_eval_sets import LocalEvalSets
17
13
  from ...types.project import Project
18
14
  from ...types.project_create import ProjectCreate
19
- from ...types.prompt_mixin_prompts import PromptMixinPrompts
20
15
  from ...types.usage_and_plan import UsageAndPlan
21
16
 
22
17
  try:
@@ -298,730 +293,38 @@ class ProjectsClient:
298
293
  raise ApiError(status_code=_response.status_code, body=_response.text)
299
294
  raise ApiError(status_code=_response.status_code, body=_response_json)
300
295
 
301
- def list_datasets_for_project(
302
- self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
303
- ) -> typing.List[EvalDataset]:
304
- """
305
- List eval datasets for a project.
306
-
307
- Parameters:
308
- - project_id: typing.Optional[str].
309
-
310
- - organization_id: typing.Optional[str].
311
- ---
312
- from llama_cloud.client import LlamaCloud
313
-
314
- client = LlamaCloud(
315
- token="YOUR_TOKEN",
316
- )
317
- client.projects.list_datasets_for_project()
318
- """
319
- _response = self._client_wrapper.httpx_client.request(
320
- "GET",
321
- urllib.parse.urljoin(
322
- f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/eval/dataset"
323
- ),
324
- params=remove_none_from_dict({"organization_id": organization_id}),
325
- headers=self._client_wrapper.get_headers(),
326
- timeout=60,
327
- )
328
- if 200 <= _response.status_code < 300:
329
- return pydantic.parse_obj_as(typing.List[EvalDataset], _response.json()) # type: ignore
330
- if _response.status_code == 422:
331
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
332
- try:
333
- _response_json = _response.json()
334
- except JSONDecodeError:
335
- raise ApiError(status_code=_response.status_code, body=_response.text)
336
- raise ApiError(status_code=_response.status_code, body=_response_json)
337
-
338
- def create_eval_dataset_for_project(
339
- self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None, name: str
340
- ) -> EvalDataset:
341
- """
342
- Create a new eval dataset for a project.
343
-
344
- Parameters:
345
- - project_id: typing.Optional[str].
346
-
347
- - organization_id: typing.Optional[str].
348
-
349
- - name: str. The name of the EvalDataset.
350
- ---
351
- from llama_cloud.client import LlamaCloud
352
-
353
- client = LlamaCloud(
354
- token="YOUR_TOKEN",
355
- )
356
- client.projects.create_eval_dataset_for_project(
357
- name="string",
358
- )
359
- """
360
- _response = self._client_wrapper.httpx_client.request(
361
- "POST",
362
- urllib.parse.urljoin(
363
- f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/eval/dataset"
364
- ),
365
- params=remove_none_from_dict({"organization_id": organization_id}),
366
- json=jsonable_encoder({"name": name}),
367
- headers=self._client_wrapper.get_headers(),
368
- timeout=60,
369
- )
370
- if 200 <= _response.status_code < 300:
371
- return pydantic.parse_obj_as(EvalDataset, _response.json()) # type: ignore
372
- if _response.status_code == 422:
373
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
374
- try:
375
- _response_json = _response.json()
376
- except JSONDecodeError:
377
- raise ApiError(status_code=_response.status_code, body=_response.text)
378
- raise ApiError(status_code=_response.status_code, body=_response_json)
379
-
380
- def create_local_eval_set_for_project(
381
- self,
382
- project_id: typing.Optional[str],
383
- *,
384
- organization_id: typing.Optional[str] = None,
385
- app_name: str,
386
- results: typing.Dict[str, typing.List[LocalEval]],
387
- ) -> typing.List[LocalEvalResults]:
388
- """
389
- Create a new local eval set.
390
-
391
- Parameters:
392
- - project_id: typing.Optional[str].
393
-
394
- - organization_id: typing.Optional[str].
395
-
396
- - app_name: str. The name of the app.
397
-
398
- - results: typing.Dict[str, typing.List[LocalEval]]. The eval results.
399
- ---
400
- from llama_cloud.client import LlamaCloud
401
-
402
- client = LlamaCloud(
403
- token="YOUR_TOKEN",
404
- )
405
- client.projects.create_local_eval_set_for_project(
406
- app_name="string",
407
- results={"string": []},
408
- )
409
- """
410
- _response = self._client_wrapper.httpx_client.request(
411
- "POST",
412
- urllib.parse.urljoin(
413
- f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/localevalset"
414
- ),
415
- params=remove_none_from_dict({"organization_id": organization_id}),
416
- json=jsonable_encoder({"app_name": app_name, "results": results}),
417
- headers=self._client_wrapper.get_headers(),
418
- timeout=60,
419
- )
420
- if 200 <= _response.status_code < 300:
421
- return pydantic.parse_obj_as(typing.List[LocalEvalResults], _response.json()) # type: ignore
422
- if _response.status_code == 422:
423
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
424
- try:
425
- _response_json = _response.json()
426
- except JSONDecodeError:
427
- raise ApiError(status_code=_response.status_code, body=_response.text)
428
- raise ApiError(status_code=_response.status_code, body=_response_json)
429
-
430
- def list_local_evals_for_project(
431
- self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
432
- ) -> typing.List[LocalEvalResults]:
433
- """
434
- List local eval results for a project.
435
-
436
- Parameters:
437
- - project_id: typing.Optional[str].
438
-
439
- - organization_id: typing.Optional[str].
440
- ---
441
- from llama_cloud.client import LlamaCloud
442
-
443
- client = LlamaCloud(
444
- token="YOUR_TOKEN",
445
- )
446
- client.projects.list_local_evals_for_project()
447
- """
448
- _response = self._client_wrapper.httpx_client.request(
449
- "GET",
450
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/localeval"),
451
- params=remove_none_from_dict({"organization_id": organization_id}),
452
- headers=self._client_wrapper.get_headers(),
453
- timeout=60,
454
- )
455
- if 200 <= _response.status_code < 300:
456
- return pydantic.parse_obj_as(typing.List[LocalEvalResults], _response.json()) # type: ignore
457
- if _response.status_code == 422:
458
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
459
- try:
460
- _response_json = _response.json()
461
- except JSONDecodeError:
462
- raise ApiError(status_code=_response.status_code, body=_response.text)
463
- raise ApiError(status_code=_response.status_code, body=_response_json)
464
-
465
- def list_local_eval_sets_for_project(
466
- self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
467
- ) -> typing.List[LocalEvalSets]:
468
- """
469
- List local eval sets for a project.
470
-
471
- Parameters:
472
- - project_id: typing.Optional[str].
473
-
474
- - organization_id: typing.Optional[str].
475
- ---
476
- from llama_cloud.client import LlamaCloud
477
-
478
- client = LlamaCloud(
479
- token="YOUR_TOKEN",
480
- )
481
- client.projects.list_local_eval_sets_for_project()
482
- """
483
- _response = self._client_wrapper.httpx_client.request(
484
- "GET",
485
- urllib.parse.urljoin(
486
- f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/localevalsets"
487
- ),
488
- params=remove_none_from_dict({"organization_id": organization_id}),
489
- headers=self._client_wrapper.get_headers(),
490
- timeout=60,
491
- )
492
- if 200 <= _response.status_code < 300:
493
- return pydantic.parse_obj_as(typing.List[LocalEvalSets], _response.json()) # type: ignore
494
- if _response.status_code == 422:
495
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
496
- try:
497
- _response_json = _response.json()
498
- except JSONDecodeError:
499
- raise ApiError(status_code=_response.status_code, body=_response.text)
500
- raise ApiError(status_code=_response.status_code, body=_response_json)
501
-
502
- def delete_local_eval_set(
503
- self, project_id: typing.Optional[str], local_eval_set_id: str, *, organization_id: typing.Optional[str] = None
504
- ) -> typing.Any:
505
- """
506
- Delete a local eval set.
507
-
508
- Parameters:
509
- - project_id: typing.Optional[str].
510
-
511
- - local_eval_set_id: str.
512
-
513
- - organization_id: typing.Optional[str].
514
- ---
515
- from llama_cloud.client import LlamaCloud
516
-
517
- client = LlamaCloud(
518
- token="YOUR_TOKEN",
519
- )
520
- client.projects.delete_local_eval_set(
521
- local_eval_set_id="string",
522
- )
523
- """
524
- _response = self._client_wrapper.httpx_client.request(
525
- "DELETE",
526
- urllib.parse.urljoin(
527
- f"{self._client_wrapper.get_base_url()}/",
528
- f"api/v1/projects/{project_id}/localevalset/{local_eval_set_id}",
529
- ),
530
- params=remove_none_from_dict({"organization_id": organization_id}),
531
- headers=self._client_wrapper.get_headers(),
532
- timeout=60,
533
- )
534
- if 200 <= _response.status_code < 300:
535
- return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore
536
- if _response.status_code == 422:
537
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
538
- try:
539
- _response_json = _response.json()
540
- except JSONDecodeError:
541
- raise ApiError(status_code=_response.status_code, body=_response.text)
542
- raise ApiError(status_code=_response.status_code, body=_response_json)
543
-
544
- def list_promptmixin_prompts(
545
- self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
546
- ) -> typing.List[PromptMixinPrompts]:
547
- """
548
- List PromptMixin prompt sets for a project.
549
-
550
- Parameters:
551
- - project_id: typing.Optional[str].
552
-
553
- - organization_id: typing.Optional[str].
554
- ---
555
- from llama_cloud.client import LlamaCloud
556
-
557
- client = LlamaCloud(
558
- token="YOUR_TOKEN",
559
- )
560
- client.projects.list_promptmixin_prompts()
561
- """
562
- _response = self._client_wrapper.httpx_client.request(
563
- "GET",
564
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/prompts"),
565
- params=remove_none_from_dict({"organization_id": organization_id}),
566
- headers=self._client_wrapper.get_headers(),
567
- timeout=60,
568
- )
569
- if 200 <= _response.status_code < 300:
570
- return pydantic.parse_obj_as(typing.List[PromptMixinPrompts], _response.json()) # type: ignore
571
- if _response.status_code == 422:
572
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
573
- try:
574
- _response_json = _response.json()
575
- except JSONDecodeError:
576
- raise ApiError(status_code=_response.status_code, body=_response.text)
577
- raise ApiError(status_code=_response.status_code, body=_response_json)
578
-
579
- def create_prompt_mixin_prompts(
580
- self,
581
- project_id: typing.Optional[str],
582
- *,
583
- organization_id: typing.Optional[str] = None,
584
- request: PromptMixinPrompts,
585
- ) -> PromptMixinPrompts:
586
- """
587
- Create a new PromptMixin prompt set.
588
-
589
- Parameters:
590
- - project_id: typing.Optional[str].
591
-
592
- - organization_id: typing.Optional[str].
593
-
594
- - request: PromptMixinPrompts.
595
- ---
596
- from llama_cloud import PromptMixinPrompts
597
- from llama_cloud.client import LlamaCloud
598
-
599
- client = LlamaCloud(
600
- token="YOUR_TOKEN",
601
- )
602
- client.projects.create_prompt_mixin_prompts(
603
- request=PromptMixinPrompts(
604
- project_id="string",
605
- name="string",
606
- prompts=[],
607
- ),
608
- )
609
- """
610
- _response = self._client_wrapper.httpx_client.request(
611
- "POST",
612
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/prompts"),
613
- params=remove_none_from_dict({"organization_id": organization_id}),
614
- json=jsonable_encoder(request),
615
- headers=self._client_wrapper.get_headers(),
616
- timeout=60,
617
- )
618
- if 200 <= _response.status_code < 300:
619
- return pydantic.parse_obj_as(PromptMixinPrompts, _response.json()) # type: ignore
620
- if _response.status_code == 422:
621
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
622
- try:
623
- _response_json = _response.json()
624
- except JSONDecodeError:
625
- raise ApiError(status_code=_response.status_code, body=_response.text)
626
- raise ApiError(status_code=_response.status_code, body=_response_json)
627
-
628
- def update_promptmixin_prompts(
629
- self,
630
- project_id: typing.Optional[str],
631
- prompt_set_id: str,
632
- *,
633
- organization_id: typing.Optional[str] = None,
634
- request: PromptMixinPrompts,
635
- ) -> PromptMixinPrompts:
636
- """
637
- Update a PromptMixin prompt set.
638
-
639
- Parameters:
640
- - project_id: typing.Optional[str].
641
-
642
- - prompt_set_id: str.
643
-
644
- - organization_id: typing.Optional[str].
645
-
646
- - request: PromptMixinPrompts.
647
- ---
648
- from llama_cloud import PromptMixinPrompts
649
- from llama_cloud.client import LlamaCloud
650
-
651
- client = LlamaCloud(
652
- token="YOUR_TOKEN",
653
- )
654
- client.projects.update_promptmixin_prompts(
655
- prompt_set_id="string",
656
- request=PromptMixinPrompts(
657
- project_id="string",
658
- name="string",
659
- prompts=[],
660
- ),
661
- )
662
- """
663
- _response = self._client_wrapper.httpx_client.request(
664
- "PUT",
665
- urllib.parse.urljoin(
666
- f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/prompts/{prompt_set_id}"
667
- ),
668
- params=remove_none_from_dict({"organization_id": organization_id}),
669
- json=jsonable_encoder(request),
670
- headers=self._client_wrapper.get_headers(),
671
- timeout=60,
672
- )
673
- if 200 <= _response.status_code < 300:
674
- return pydantic.parse_obj_as(PromptMixinPrompts, _response.json()) # type: ignore
675
- if _response.status_code == 422:
676
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
677
- try:
678
- _response_json = _response.json()
679
- except JSONDecodeError:
680
- raise ApiError(status_code=_response.status_code, body=_response.text)
681
- raise ApiError(status_code=_response.status_code, body=_response_json)
682
-
683
- def delete_prompt_mixin_prompts(
684
- self, project_id: typing.Optional[str], prompt_set_id: str, *, organization_id: typing.Optional[str] = None
685
- ) -> typing.Any:
686
- """
687
- Delete a PromptMixin prompt set.
688
-
689
- Parameters:
690
- - project_id: typing.Optional[str].
691
-
692
- - prompt_set_id: str.
693
-
694
- - organization_id: typing.Optional[str].
695
- ---
696
- from llama_cloud.client import LlamaCloud
697
-
698
- client = LlamaCloud(
699
- token="YOUR_TOKEN",
700
- )
701
- client.projects.delete_prompt_mixin_prompts(
702
- prompt_set_id="string",
703
- )
704
- """
705
- _response = self._client_wrapper.httpx_client.request(
706
- "DELETE",
707
- urllib.parse.urljoin(
708
- f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/prompts/{prompt_set_id}"
709
- ),
710
- params=remove_none_from_dict({"organization_id": organization_id}),
711
- headers=self._client_wrapper.get_headers(),
712
- timeout=60,
713
- )
714
- if 200 <= _response.status_code < 300:
715
- return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore
716
- if _response.status_code == 422:
717
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
718
- try:
719
- _response_json = _response.json()
720
- except JSONDecodeError:
721
- raise ApiError(status_code=_response.status_code, body=_response.text)
722
- raise ApiError(status_code=_response.status_code, body=_response_json)
723
-
724
-
725
- class AsyncProjectsClient:
726
- def __init__(self, *, client_wrapper: AsyncClientWrapper):
727
- self._client_wrapper = client_wrapper
728
-
729
- async def list_projects(
730
- self, *, organization_id: typing.Optional[str] = None, project_name: typing.Optional[str] = None
731
- ) -> typing.List[Project]:
732
- """
733
- List projects or get one by name
734
-
735
- Parameters:
736
- - organization_id: typing.Optional[str].
737
-
738
- - project_name: typing.Optional[str].
739
- ---
740
- from llama_cloud.client import AsyncLlamaCloud
741
-
742
- client = AsyncLlamaCloud(
743
- token="YOUR_TOKEN",
744
- )
745
- await client.projects.list_projects()
746
- """
747
- _response = await self._client_wrapper.httpx_client.request(
748
- "GET",
749
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/projects"),
750
- params=remove_none_from_dict({"organization_id": organization_id, "project_name": project_name}),
751
- headers=self._client_wrapper.get_headers(),
752
- timeout=60,
753
- )
754
- if 200 <= _response.status_code < 300:
755
- return pydantic.parse_obj_as(typing.List[Project], _response.json()) # type: ignore
756
- if _response.status_code == 422:
757
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
758
- try:
759
- _response_json = _response.json()
760
- except JSONDecodeError:
761
- raise ApiError(status_code=_response.status_code, body=_response.text)
762
- raise ApiError(status_code=_response.status_code, body=_response_json)
763
-
764
- async def create_project(self, *, organization_id: typing.Optional[str] = None, request: ProjectCreate) -> Project:
765
- """
766
- Create a new project.
767
-
768
- Parameters:
769
- - organization_id: typing.Optional[str].
770
-
771
- - request: ProjectCreate.
772
- ---
773
- from llama_cloud import ProjectCreate
774
- from llama_cloud.client import AsyncLlamaCloud
775
-
776
- client = AsyncLlamaCloud(
777
- token="YOUR_TOKEN",
778
- )
779
- await client.projects.create_project(
780
- request=ProjectCreate(
781
- name="string",
782
- ),
783
- )
784
- """
785
- _response = await self._client_wrapper.httpx_client.request(
786
- "POST",
787
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/projects"),
788
- params=remove_none_from_dict({"organization_id": organization_id}),
789
- json=jsonable_encoder(request),
790
- headers=self._client_wrapper.get_headers(),
791
- timeout=60,
792
- )
793
- if 200 <= _response.status_code < 300:
794
- return pydantic.parse_obj_as(Project, _response.json()) # type: ignore
795
- if _response.status_code == 422:
796
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
797
- try:
798
- _response_json = _response.json()
799
- except JSONDecodeError:
800
- raise ApiError(status_code=_response.status_code, body=_response.text)
801
- raise ApiError(status_code=_response.status_code, body=_response_json)
802
-
803
- async def upsert_project(self, *, organization_id: typing.Optional[str] = None, request: ProjectCreate) -> Project:
804
- """
805
- Upsert a project.
806
- Updates if a project with the same name already exists. Otherwise, creates a new project.
807
-
808
- Parameters:
809
- - organization_id: typing.Optional[str].
810
-
811
- - request: ProjectCreate.
812
- ---
813
- from llama_cloud import ProjectCreate
814
- from llama_cloud.client import AsyncLlamaCloud
815
-
816
- client = AsyncLlamaCloud(
817
- token="YOUR_TOKEN",
818
- )
819
- await client.projects.upsert_project(
820
- request=ProjectCreate(
821
- name="string",
822
- ),
823
- )
824
- """
825
- _response = await self._client_wrapper.httpx_client.request(
826
- "PUT",
827
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/projects"),
828
- params=remove_none_from_dict({"organization_id": organization_id}),
829
- json=jsonable_encoder(request),
830
- headers=self._client_wrapper.get_headers(),
831
- timeout=60,
832
- )
833
- if 200 <= _response.status_code < 300:
834
- return pydantic.parse_obj_as(Project, _response.json()) # type: ignore
835
- if _response.status_code == 422:
836
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
837
- try:
838
- _response_json = _response.json()
839
- except JSONDecodeError:
840
- raise ApiError(status_code=_response.status_code, body=_response.text)
841
- raise ApiError(status_code=_response.status_code, body=_response_json)
842
-
843
- async def get_project(
844
- self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
845
- ) -> Project:
846
- """
847
- Get a project by ID.
848
-
849
- Parameters:
850
- - project_id: typing.Optional[str].
851
-
852
- - organization_id: typing.Optional[str].
853
- ---
854
- from llama_cloud.client import AsyncLlamaCloud
855
-
856
- client = AsyncLlamaCloud(
857
- token="YOUR_TOKEN",
858
- )
859
- await client.projects.get_project()
860
- """
861
- _response = await self._client_wrapper.httpx_client.request(
862
- "GET",
863
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
864
- params=remove_none_from_dict({"organization_id": organization_id}),
865
- headers=self._client_wrapper.get_headers(),
866
- timeout=60,
867
- )
868
- if 200 <= _response.status_code < 300:
869
- return pydantic.parse_obj_as(Project, _response.json()) # type: ignore
870
- if _response.status_code == 422:
871
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
872
- try:
873
- _response_json = _response.json()
874
- except JSONDecodeError:
875
- raise ApiError(status_code=_response.status_code, body=_response.text)
876
- raise ApiError(status_code=_response.status_code, body=_response_json)
877
-
878
- async def update_existing_project(
879
- self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None, name: str
880
- ) -> Project:
881
- """
882
- Update an existing project.
883
-
884
- Parameters:
885
- - project_id: typing.Optional[str].
886
-
887
- - organization_id: typing.Optional[str].
888
-
889
- - name: str.
890
- ---
891
- from llama_cloud.client import AsyncLlamaCloud
892
-
893
- client = AsyncLlamaCloud(
894
- token="YOUR_TOKEN",
895
- )
896
- await client.projects.update_existing_project(
897
- name="string",
898
- )
899
- """
900
- _response = await self._client_wrapper.httpx_client.request(
901
- "PUT",
902
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
903
- params=remove_none_from_dict({"organization_id": organization_id}),
904
- json=jsonable_encoder({"name": name}),
905
- headers=self._client_wrapper.get_headers(),
906
- timeout=60,
907
- )
908
- if 200 <= _response.status_code < 300:
909
- return pydantic.parse_obj_as(Project, _response.json()) # type: ignore
910
- if _response.status_code == 422:
911
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
912
- try:
913
- _response_json = _response.json()
914
- except JSONDecodeError:
915
- raise ApiError(status_code=_response.status_code, body=_response.text)
916
- raise ApiError(status_code=_response.status_code, body=_response_json)
917
-
918
- async def delete_project(
919
- self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
920
- ) -> None:
921
- """
922
- Delete a project by ID.
923
-
924
- Parameters:
925
- - project_id: typing.Optional[str].
926
-
927
- - organization_id: typing.Optional[str].
928
- ---
929
- from llama_cloud.client import AsyncLlamaCloud
930
-
931
- client = AsyncLlamaCloud(
932
- token="YOUR_TOKEN",
933
- )
934
- await client.projects.delete_project()
935
- """
936
- _response = await self._client_wrapper.httpx_client.request(
937
- "DELETE",
938
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
939
- params=remove_none_from_dict({"organization_id": organization_id}),
940
- headers=self._client_wrapper.get_headers(),
941
- timeout=60,
942
- )
943
- if 200 <= _response.status_code < 300:
944
- return
945
- if _response.status_code == 422:
946
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
947
- try:
948
- _response_json = _response.json()
949
- except JSONDecodeError:
950
- raise ApiError(status_code=_response.status_code, body=_response.text)
951
- raise ApiError(status_code=_response.status_code, body=_response_json)
952
-
953
- async def get_project_usage(
954
- self,
955
- project_id: typing.Optional[str],
956
- *,
957
- get_current_invoice_total: typing.Optional[bool] = None,
958
- organization_id: typing.Optional[str] = None,
959
- ) -> UsageAndPlan:
960
- """
961
- Get usage for a project
962
-
963
- Parameters:
964
- - project_id: typing.Optional[str].
965
-
966
- - get_current_invoice_total: typing.Optional[bool].
967
-
968
- - organization_id: typing.Optional[str].
969
- ---
970
- from llama_cloud.client import AsyncLlamaCloud
971
296
 
972
- client = AsyncLlamaCloud(
973
- token="YOUR_TOKEN",
974
- )
975
- await client.projects.get_project_usage()
976
- """
977
- _response = await self._client_wrapper.httpx_client.request(
978
- "GET",
979
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/usage"),
980
- params=remove_none_from_dict(
981
- {"get_current_invoice_total": get_current_invoice_total, "organization_id": organization_id}
982
- ),
983
- headers=self._client_wrapper.get_headers(),
984
- timeout=60,
985
- )
986
- if 200 <= _response.status_code < 300:
987
- return pydantic.parse_obj_as(UsageAndPlan, _response.json()) # type: ignore
988
- if _response.status_code == 422:
989
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
990
- try:
991
- _response_json = _response.json()
992
- except JSONDecodeError:
993
- raise ApiError(status_code=_response.status_code, body=_response.text)
994
- raise ApiError(status_code=_response.status_code, body=_response_json)
297
+ class AsyncProjectsClient:
298
+ def __init__(self, *, client_wrapper: AsyncClientWrapper):
299
+ self._client_wrapper = client_wrapper
995
300
 
996
- async def list_datasets_for_project(
997
- self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
998
- ) -> typing.List[EvalDataset]:
301
+ async def list_projects(
302
+ self, *, organization_id: typing.Optional[str] = None, project_name: typing.Optional[str] = None
303
+ ) -> typing.List[Project]:
999
304
  """
1000
- List eval datasets for a project.
305
+ List projects or get one by name
1001
306
 
1002
307
  Parameters:
1003
- - project_id: typing.Optional[str].
1004
-
1005
308
  - organization_id: typing.Optional[str].
309
+
310
+ - project_name: typing.Optional[str].
1006
311
  ---
1007
312
  from llama_cloud.client import AsyncLlamaCloud
1008
313
 
1009
314
  client = AsyncLlamaCloud(
1010
315
  token="YOUR_TOKEN",
1011
316
  )
1012
- await client.projects.list_datasets_for_project()
317
+ await client.projects.list_projects()
1013
318
  """
1014
319
  _response = await self._client_wrapper.httpx_client.request(
1015
320
  "GET",
1016
- urllib.parse.urljoin(
1017
- f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/eval/dataset"
1018
- ),
1019
- params=remove_none_from_dict({"organization_id": organization_id}),
321
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/projects"),
322
+ params=remove_none_from_dict({"organization_id": organization_id, "project_name": project_name}),
1020
323
  headers=self._client_wrapper.get_headers(),
1021
324
  timeout=60,
1022
325
  )
1023
326
  if 200 <= _response.status_code < 300:
1024
- return pydantic.parse_obj_as(typing.List[EvalDataset], _response.json()) # type: ignore
327
+ return pydantic.parse_obj_as(typing.List[Project], _response.json()) # type: ignore
1025
328
  if _response.status_code == 422:
1026
329
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1027
330
  try:
@@ -1030,40 +333,37 @@ class AsyncProjectsClient:
1030
333
  raise ApiError(status_code=_response.status_code, body=_response.text)
1031
334
  raise ApiError(status_code=_response.status_code, body=_response_json)
1032
335
 
1033
- async def create_eval_dataset_for_project(
1034
- self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None, name: str
1035
- ) -> EvalDataset:
336
+ async def create_project(self, *, organization_id: typing.Optional[str] = None, request: ProjectCreate) -> Project:
1036
337
  """
1037
- Create a new eval dataset for a project.
338
+ Create a new project.
1038
339
 
1039
340
  Parameters:
1040
- - project_id: typing.Optional[str].
1041
-
1042
341
  - organization_id: typing.Optional[str].
1043
342
 
1044
- - name: str. The name of the EvalDataset.
343
+ - request: ProjectCreate.
1045
344
  ---
345
+ from llama_cloud import ProjectCreate
1046
346
  from llama_cloud.client import AsyncLlamaCloud
1047
347
 
1048
348
  client = AsyncLlamaCloud(
1049
349
  token="YOUR_TOKEN",
1050
350
  )
1051
- await client.projects.create_eval_dataset_for_project(
1052
- name="string",
351
+ await client.projects.create_project(
352
+ request=ProjectCreate(
353
+ name="string",
354
+ ),
1053
355
  )
1054
356
  """
1055
357
  _response = await self._client_wrapper.httpx_client.request(
1056
358
  "POST",
1057
- urllib.parse.urljoin(
1058
- f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/eval/dataset"
1059
- ),
359
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/projects"),
1060
360
  params=remove_none_from_dict({"organization_id": organization_id}),
1061
- json=jsonable_encoder({"name": name}),
361
+ json=jsonable_encoder(request),
1062
362
  headers=self._client_wrapper.get_headers(),
1063
363
  timeout=60,
1064
364
  )
1065
365
  if 200 <= _response.status_code < 300:
1066
- return pydantic.parse_obj_as(EvalDataset, _response.json()) # type: ignore
366
+ return pydantic.parse_obj_as(Project, _response.json()) # type: ignore
1067
367
  if _response.status_code == 422:
1068
368
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1069
369
  try:
@@ -1072,83 +372,38 @@ class AsyncProjectsClient:
1072
372
  raise ApiError(status_code=_response.status_code, body=_response.text)
1073
373
  raise ApiError(status_code=_response.status_code, body=_response_json)
1074
374
 
1075
- async def create_local_eval_set_for_project(
1076
- self,
1077
- project_id: typing.Optional[str],
1078
- *,
1079
- organization_id: typing.Optional[str] = None,
1080
- app_name: str,
1081
- results: typing.Dict[str, typing.List[LocalEval]],
1082
- ) -> typing.List[LocalEvalResults]:
375
+ async def upsert_project(self, *, organization_id: typing.Optional[str] = None, request: ProjectCreate) -> Project:
1083
376
  """
1084
- Create a new local eval set.
377
+ Upsert a project.
378
+ Updates if a project with the same name already exists. Otherwise, creates a new project.
1085
379
 
1086
380
  Parameters:
1087
- - project_id: typing.Optional[str].
1088
-
1089
381
  - organization_id: typing.Optional[str].
1090
382
 
1091
- - app_name: str. The name of the app.
1092
-
1093
- - results: typing.Dict[str, typing.List[LocalEval]]. The eval results.
383
+ - request: ProjectCreate.
1094
384
  ---
385
+ from llama_cloud import ProjectCreate
1095
386
  from llama_cloud.client import AsyncLlamaCloud
1096
387
 
1097
388
  client = AsyncLlamaCloud(
1098
389
  token="YOUR_TOKEN",
1099
390
  )
1100
- await client.projects.create_local_eval_set_for_project(
1101
- app_name="string",
1102
- results={"string": []},
1103
- )
1104
- """
1105
- _response = await self._client_wrapper.httpx_client.request(
1106
- "POST",
1107
- urllib.parse.urljoin(
1108
- f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/localevalset"
391
+ await client.projects.upsert_project(
392
+ request=ProjectCreate(
393
+ name="string",
1109
394
  ),
1110
- params=remove_none_from_dict({"organization_id": organization_id}),
1111
- json=jsonable_encoder({"app_name": app_name, "results": results}),
1112
- headers=self._client_wrapper.get_headers(),
1113
- timeout=60,
1114
- )
1115
- if 200 <= _response.status_code < 300:
1116
- return pydantic.parse_obj_as(typing.List[LocalEvalResults], _response.json()) # type: ignore
1117
- if _response.status_code == 422:
1118
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1119
- try:
1120
- _response_json = _response.json()
1121
- except JSONDecodeError:
1122
- raise ApiError(status_code=_response.status_code, body=_response.text)
1123
- raise ApiError(status_code=_response.status_code, body=_response_json)
1124
-
1125
- async def list_local_evals_for_project(
1126
- self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
1127
- ) -> typing.List[LocalEvalResults]:
1128
- """
1129
- List local eval results for a project.
1130
-
1131
- Parameters:
1132
- - project_id: typing.Optional[str].
1133
-
1134
- - organization_id: typing.Optional[str].
1135
- ---
1136
- from llama_cloud.client import AsyncLlamaCloud
1137
-
1138
- client = AsyncLlamaCloud(
1139
- token="YOUR_TOKEN",
1140
395
  )
1141
- await client.projects.list_local_evals_for_project()
1142
396
  """
1143
397
  _response = await self._client_wrapper.httpx_client.request(
1144
- "GET",
1145
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/localeval"),
398
+ "PUT",
399
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/projects"),
1146
400
  params=remove_none_from_dict({"organization_id": organization_id}),
401
+ json=jsonable_encoder(request),
1147
402
  headers=self._client_wrapper.get_headers(),
1148
403
  timeout=60,
1149
404
  )
1150
405
  if 200 <= _response.status_code < 300:
1151
- return pydantic.parse_obj_as(typing.List[LocalEvalResults], _response.json()) # type: ignore
406
+ return pydantic.parse_obj_as(Project, _response.json()) # type: ignore
1152
407
  if _response.status_code == 422:
1153
408
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1154
409
  try:
@@ -1157,11 +412,11 @@ class AsyncProjectsClient:
1157
412
  raise ApiError(status_code=_response.status_code, body=_response.text)
1158
413
  raise ApiError(status_code=_response.status_code, body=_response_json)
1159
414
 
1160
- async def list_local_eval_sets_for_project(
415
+ async def get_project(
1161
416
  self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
1162
- ) -> typing.List[LocalEvalSets]:
417
+ ) -> Project:
1163
418
  """
1164
- List local eval sets for a project.
419
+ Get a project by ID.
1165
420
 
1166
421
  Parameters:
1167
422
  - project_id: typing.Optional[str].
@@ -1173,19 +428,17 @@ class AsyncProjectsClient:
1173
428
  client = AsyncLlamaCloud(
1174
429
  token="YOUR_TOKEN",
1175
430
  )
1176
- await client.projects.list_local_eval_sets_for_project()
431
+ await client.projects.get_project()
1177
432
  """
1178
433
  _response = await self._client_wrapper.httpx_client.request(
1179
434
  "GET",
1180
- urllib.parse.urljoin(
1181
- f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/localevalsets"
1182
- ),
435
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
1183
436
  params=remove_none_from_dict({"organization_id": organization_id}),
1184
437
  headers=self._client_wrapper.get_headers(),
1185
438
  timeout=60,
1186
439
  )
1187
440
  if 200 <= _response.status_code < 300:
1188
- return pydantic.parse_obj_as(typing.List[LocalEvalSets], _response.json()) # type: ignore
441
+ return pydantic.parse_obj_as(Project, _response.json()) # type: ignore
1189
442
  if _response.status_code == 422:
1190
443
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1191
444
  try:
@@ -1194,40 +447,38 @@ class AsyncProjectsClient:
1194
447
  raise ApiError(status_code=_response.status_code, body=_response.text)
1195
448
  raise ApiError(status_code=_response.status_code, body=_response_json)
1196
449
 
1197
- async def delete_local_eval_set(
1198
- self, project_id: typing.Optional[str], local_eval_set_id: str, *, organization_id: typing.Optional[str] = None
1199
- ) -> typing.Any:
450
+ async def update_existing_project(
451
+ self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None, name: str
452
+ ) -> Project:
1200
453
  """
1201
- Delete a local eval set.
454
+ Update an existing project.
1202
455
 
1203
456
  Parameters:
1204
457
  - project_id: typing.Optional[str].
1205
458
 
1206
- - local_eval_set_id: str.
1207
-
1208
459
  - organization_id: typing.Optional[str].
460
+
461
+ - name: str.
1209
462
  ---
1210
463
  from llama_cloud.client import AsyncLlamaCloud
1211
464
 
1212
465
  client = AsyncLlamaCloud(
1213
466
  token="YOUR_TOKEN",
1214
467
  )
1215
- await client.projects.delete_local_eval_set(
1216
- local_eval_set_id="string",
468
+ await client.projects.update_existing_project(
469
+ name="string",
1217
470
  )
1218
471
  """
1219
472
  _response = await self._client_wrapper.httpx_client.request(
1220
- "DELETE",
1221
- urllib.parse.urljoin(
1222
- f"{self._client_wrapper.get_base_url()}/",
1223
- f"api/v1/projects/{project_id}/localevalset/{local_eval_set_id}",
1224
- ),
473
+ "PUT",
474
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
1225
475
  params=remove_none_from_dict({"organization_id": organization_id}),
476
+ json=jsonable_encoder({"name": name}),
1226
477
  headers=self._client_wrapper.get_headers(),
1227
478
  timeout=60,
1228
479
  )
1229
480
  if 200 <= _response.status_code < 300:
1230
- return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore
481
+ return pydantic.parse_obj_as(Project, _response.json()) # type: ignore
1231
482
  if _response.status_code == 422:
1232
483
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1233
484
  try:
@@ -1236,82 +487,33 @@ class AsyncProjectsClient:
1236
487
  raise ApiError(status_code=_response.status_code, body=_response.text)
1237
488
  raise ApiError(status_code=_response.status_code, body=_response_json)
1238
489
 
1239
- async def list_promptmixin_prompts(
490
+ async def delete_project(
1240
491
  self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
1241
- ) -> typing.List[PromptMixinPrompts]:
1242
- """
1243
- List PromptMixin prompt sets for a project.
1244
-
1245
- Parameters:
1246
- - project_id: typing.Optional[str].
1247
-
1248
- - organization_id: typing.Optional[str].
1249
- ---
1250
- from llama_cloud.client import AsyncLlamaCloud
1251
-
1252
- client = AsyncLlamaCloud(
1253
- token="YOUR_TOKEN",
1254
- )
1255
- await client.projects.list_promptmixin_prompts()
1256
- """
1257
- _response = await self._client_wrapper.httpx_client.request(
1258
- "GET",
1259
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/prompts"),
1260
- params=remove_none_from_dict({"organization_id": organization_id}),
1261
- headers=self._client_wrapper.get_headers(),
1262
- timeout=60,
1263
- )
1264
- if 200 <= _response.status_code < 300:
1265
- return pydantic.parse_obj_as(typing.List[PromptMixinPrompts], _response.json()) # type: ignore
1266
- if _response.status_code == 422:
1267
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1268
- try:
1269
- _response_json = _response.json()
1270
- except JSONDecodeError:
1271
- raise ApiError(status_code=_response.status_code, body=_response.text)
1272
- raise ApiError(status_code=_response.status_code, body=_response_json)
1273
-
1274
- async def create_prompt_mixin_prompts(
1275
- self,
1276
- project_id: typing.Optional[str],
1277
- *,
1278
- organization_id: typing.Optional[str] = None,
1279
- request: PromptMixinPrompts,
1280
- ) -> PromptMixinPrompts:
492
+ ) -> None:
1281
493
  """
1282
- Create a new PromptMixin prompt set.
494
+ Delete a project by ID.
1283
495
 
1284
496
  Parameters:
1285
497
  - project_id: typing.Optional[str].
1286
498
 
1287
499
  - organization_id: typing.Optional[str].
1288
-
1289
- - request: PromptMixinPrompts.
1290
500
  ---
1291
- from llama_cloud import PromptMixinPrompts
1292
501
  from llama_cloud.client import AsyncLlamaCloud
1293
502
 
1294
503
  client = AsyncLlamaCloud(
1295
504
  token="YOUR_TOKEN",
1296
505
  )
1297
- await client.projects.create_prompt_mixin_prompts(
1298
- request=PromptMixinPrompts(
1299
- project_id="string",
1300
- name="string",
1301
- prompts=[],
1302
- ),
1303
- )
506
+ await client.projects.delete_project()
1304
507
  """
1305
508
  _response = await self._client_wrapper.httpx_client.request(
1306
- "POST",
1307
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/prompts"),
509
+ "DELETE",
510
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
1308
511
  params=remove_none_from_dict({"organization_id": organization_id}),
1309
- json=jsonable_encoder(request),
1310
512
  headers=self._client_wrapper.get_headers(),
1311
513
  timeout=60,
1312
514
  )
1313
515
  if 200 <= _response.status_code < 300:
1314
- return pydantic.parse_obj_as(PromptMixinPrompts, _response.json()) # type: ignore
516
+ return
1315
517
  if _response.status_code == 422:
1316
518
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1317
519
  try:
@@ -1320,71 +522,20 @@ class AsyncProjectsClient:
1320
522
  raise ApiError(status_code=_response.status_code, body=_response.text)
1321
523
  raise ApiError(status_code=_response.status_code, body=_response_json)
1322
524
 
1323
- async def update_promptmixin_prompts(
525
+ async def get_project_usage(
1324
526
  self,
1325
527
  project_id: typing.Optional[str],
1326
- prompt_set_id: str,
1327
528
  *,
529
+ get_current_invoice_total: typing.Optional[bool] = None,
1328
530
  organization_id: typing.Optional[str] = None,
1329
- request: PromptMixinPrompts,
1330
- ) -> PromptMixinPrompts:
1331
- """
1332
- Update a PromptMixin prompt set.
1333
-
1334
- Parameters:
1335
- - project_id: typing.Optional[str].
1336
-
1337
- - prompt_set_id: str.
1338
-
1339
- - organization_id: typing.Optional[str].
1340
-
1341
- - request: PromptMixinPrompts.
1342
- ---
1343
- from llama_cloud import PromptMixinPrompts
1344
- from llama_cloud.client import AsyncLlamaCloud
1345
-
1346
- client = AsyncLlamaCloud(
1347
- token="YOUR_TOKEN",
1348
- )
1349
- await client.projects.update_promptmixin_prompts(
1350
- prompt_set_id="string",
1351
- request=PromptMixinPrompts(
1352
- project_id="string",
1353
- name="string",
1354
- prompts=[],
1355
- ),
1356
- )
1357
- """
1358
- _response = await self._client_wrapper.httpx_client.request(
1359
- "PUT",
1360
- urllib.parse.urljoin(
1361
- f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/prompts/{prompt_set_id}"
1362
- ),
1363
- params=remove_none_from_dict({"organization_id": organization_id}),
1364
- json=jsonable_encoder(request),
1365
- headers=self._client_wrapper.get_headers(),
1366
- timeout=60,
1367
- )
1368
- if 200 <= _response.status_code < 300:
1369
- return pydantic.parse_obj_as(PromptMixinPrompts, _response.json()) # type: ignore
1370
- if _response.status_code == 422:
1371
- raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1372
- try:
1373
- _response_json = _response.json()
1374
- except JSONDecodeError:
1375
- raise ApiError(status_code=_response.status_code, body=_response.text)
1376
- raise ApiError(status_code=_response.status_code, body=_response_json)
1377
-
1378
- async def delete_prompt_mixin_prompts(
1379
- self, project_id: typing.Optional[str], prompt_set_id: str, *, organization_id: typing.Optional[str] = None
1380
- ) -> typing.Any:
531
+ ) -> UsageAndPlan:
1381
532
  """
1382
- Delete a PromptMixin prompt set.
533
+ Get usage for a project
1383
534
 
1384
535
  Parameters:
1385
536
  - project_id: typing.Optional[str].
1386
537
 
1387
- - prompt_set_id: str.
538
+ - get_current_invoice_total: typing.Optional[bool].
1388
539
 
1389
540
  - organization_id: typing.Optional[str].
1390
541
  ---
@@ -1393,21 +544,19 @@ class AsyncProjectsClient:
1393
544
  client = AsyncLlamaCloud(
1394
545
  token="YOUR_TOKEN",
1395
546
  )
1396
- await client.projects.delete_prompt_mixin_prompts(
1397
- prompt_set_id="string",
1398
- )
547
+ await client.projects.get_project_usage()
1399
548
  """
1400
549
  _response = await self._client_wrapper.httpx_client.request(
1401
- "DELETE",
1402
- urllib.parse.urljoin(
1403
- f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/prompts/{prompt_set_id}"
550
+ "GET",
551
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}/usage"),
552
+ params=remove_none_from_dict(
553
+ {"get_current_invoice_total": get_current_invoice_total, "organization_id": organization_id}
1404
554
  ),
1405
- params=remove_none_from_dict({"organization_id": organization_id}),
1406
555
  headers=self._client_wrapper.get_headers(),
1407
556
  timeout=60,
1408
557
  )
1409
558
  if 200 <= _response.status_code < 300:
1410
- return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore
559
+ return pydantic.parse_obj_as(UsageAndPlan, _response.json()) # type: ignore
1411
560
  if _response.status_code == 422:
1412
561
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1413
562
  try: