llama-cloud 0.1.35__py3-none-any.whl → 0.1.37__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of llama-cloud might be problematic. Click here for more details.

Files changed (38):
  1. llama_cloud/__init__.py +32 -4
  2. llama_cloud/resources/__init__.py +1 -3
  3. llama_cloud/resources/admin/client.py +108 -0
  4. llama_cloud/resources/beta/client.py +460 -0
  5. llama_cloud/resources/classifier/client.py +231 -181
  6. llama_cloud/resources/data_sinks/types/data_sink_update_component.py +2 -0
  7. llama_cloud/resources/files/__init__.py +2 -2
  8. llama_cloud/resources/files/client.py +15 -73
  9. llama_cloud/resources/files/types/__init__.py +1 -3
  10. llama_cloud/resources/llama_extract/client.py +96 -4
  11. llama_cloud/types/__init__.py +34 -2
  12. llama_cloud/types/classification_result.py +4 -5
  13. llama_cloud/types/classifier_rule.py +43 -0
  14. llama_cloud/types/classify_job.py +45 -0
  15. llama_cloud/types/{classify_response.py → classify_job_results.py} +3 -6
  16. llama_cloud/types/classify_job_with_status.py +47 -0
  17. llama_cloud/types/classify_parsing_configuration.py +38 -0
  18. llama_cloud/types/cloud_astra_db_vector_store.py +51 -0
  19. llama_cloud/types/cloud_confluence_data_source.py +15 -0
  20. llama_cloud/types/configurable_data_sink_names.py +4 -0
  21. llama_cloud/types/data_sink_component.py +2 -0
  22. llama_cloud/types/data_sink_create_component.py +2 -0
  23. llama_cloud/types/failure_handling_config.py +37 -0
  24. llama_cloud/types/file_classification.py +41 -0
  25. llama_cloud/types/file_create.py +41 -0
  26. llama_cloud/types/file_filter.py +40 -0
  27. llama_cloud/types/file_query_response.py +38 -0
  28. llama_cloud/types/file_store_info_response.py +34 -0
  29. llama_cloud/types/file_store_info_response_status.py +25 -0
  30. llama_cloud/types/llama_extract_mode_availability.py +37 -0
  31. llama_cloud/types/llama_extract_mode_availability_status.py +17 -0
  32. llama_cloud/types/supported_llm_model_names.py +12 -0
  33. {llama_cloud-0.1.35.dist-info → llama_cloud-0.1.37.dist-info}/METADATA +1 -1
  34. {llama_cloud-0.1.35.dist-info → llama_cloud-0.1.37.dist-info}/RECORD +38 -24
  35. /llama_cloud/{resources/files/types → types}/file_create_permission_info_value.py +0 -0
  36. /llama_cloud/{resources/files/types → types}/file_create_resource_info_value.py +0 -0
  37. {llama_cloud-0.1.35.dist-info → llama_cloud-0.1.37.dist-info}/LICENSE +0 -0
  38. {llama_cloud-0.1.35.dist-info → llama_cloud-0.1.37.dist-info}/WHEEL +0 -0
@@ -15,6 +15,10 @@ from ...types.agent_data import AgentData
15
15
  from ...types.batch import Batch
16
16
  from ...types.batch_paginated_list import BatchPaginatedList
17
17
  from ...types.batch_public_output import BatchPublicOutput
18
+ from ...types.file import File
19
+ from ...types.file_create import FileCreate
20
+ from ...types.file_filter import FileFilter
21
+ from ...types.file_query_response import FileQueryResponse
18
22
  from ...types.filter_operation import FilterOperation
19
23
  from ...types.http_validation_error import HttpValidationError
20
24
  from ...types.llama_parse_parameters import LlamaParseParameters
@@ -610,6 +614,234 @@ class BetaClient:
610
614
  raise ApiError(status_code=_response.status_code, body=_response.text)
611
615
  raise ApiError(status_code=_response.status_code, body=_response_json)
612
616
 
617
+ def create_file(
618
+ self,
619
+ *,
620
+ project_id: typing.Optional[str] = None,
621
+ organization_id: typing.Optional[str] = None,
622
+ request: FileCreate,
623
+ ) -> File:
624
+ """
625
+ Create a new file in the project.
626
+
627
+ Args:
628
+ file_create: File creation data
629
+ project: Validated project from dependency
630
+ db: Database session
631
+
632
+ Returns:
633
+ The created file
634
+
635
+ Parameters:
636
+ - project_id: typing.Optional[str].
637
+
638
+ - organization_id: typing.Optional[str].
639
+
640
+ - request: FileCreate.
641
+ ---
642
+ from llama_cloud import FileCreate
643
+ from llama_cloud.client import LlamaCloud
644
+
645
+ client = LlamaCloud(
646
+ token="YOUR_TOKEN",
647
+ )
648
+ client.beta.create_file(
649
+ request=FileCreate(
650
+ name="string",
651
+ ),
652
+ )
653
+ """
654
+ _response = self._client_wrapper.httpx_client.request(
655
+ "POST",
656
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
657
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
658
+ json=jsonable_encoder(request),
659
+ headers=self._client_wrapper.get_headers(),
660
+ timeout=60,
661
+ )
662
+ if 200 <= _response.status_code < 300:
663
+ return pydantic.parse_obj_as(File, _response.json()) # type: ignore
664
+ if _response.status_code == 422:
665
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
666
+ try:
667
+ _response_json = _response.json()
668
+ except JSONDecodeError:
669
+ raise ApiError(status_code=_response.status_code, body=_response.text)
670
+ raise ApiError(status_code=_response.status_code, body=_response_json)
671
+
672
def upsert_file(
    self,
    *,
    project_id: typing.Optional[str] = None,
    organization_id: typing.Optional[str] = None,
    request: FileCreate,
) -> File:
    """
    Upsert a file (create it, or update it if it already exists) via
    PUT /api/v1/beta/files.

    Parameters:
    - project_id: typing.Optional[str]. Project scope; sent as a query parameter
      and omitted when None.

    - organization_id: typing.Optional[str]. Organization scope; sent as a query
      parameter and omitted when None.

    - request: FileCreate. The file creation/update payload (request body).

    Returns:
    File. The upserted file as returned by the API.

    Raises:
    UnprocessableEntityError on HTTP 422; ApiError on any other non-2xx response.
    ---
    from llama_cloud import FileCreate
    from llama_cloud.client import LlamaCloud

    client = LlamaCloud(
        token="YOUR_TOKEN",
    )
    client.beta.upsert_file(
        request=FileCreate(
            name="string",
        ),
    )
    """
    _response = self._client_wrapper.httpx_client.request(
        "PUT",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
        # None-valued params are dropped so the server sees only explicit scopes.
        params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
        json=jsonable_encoder(request),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    if 200 <= _response.status_code < 300:
        return pydantic.parse_obj_as(File, _response.json())  # type: ignore
    if _response.status_code == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _response_json = _response.json()
    except JSONDecodeError:
        # Non-JSON error body: surface the raw text instead.
        raise ApiError(status_code=_response.status_code, body=_response.text)
    raise ApiError(status_code=_response.status_code, body=_response_json)
726
+
727
def query_files(
    self,
    *,
    project_id: typing.Optional[str] = None,
    organization_id: typing.Optional[str] = None,
    page_size: typing.Optional[int] = OMIT,
    page_token: typing.Optional[str] = OMIT,
    filter: typing.Optional[FileFilter] = OMIT,
    order_by: typing.Optional[str] = OMIT,
) -> FileQueryResponse:
    """
    Query files with flexible filtering and pagination via
    POST /api/v1/beta/files/query.

    Parameters:
    - project_id: typing.Optional[str]. Project scope; sent as a query parameter
      and omitted when None.

    - organization_id: typing.Optional[str]. Organization scope; sent as a query
      parameter and omitted when None.

    - page_size: typing.Optional[int]. Maximum number of files per page.

    - page_token: typing.Optional[str]. Continuation token from a prior response.

    - filter: typing.Optional[FileFilter]. Filter criteria for the query.

    - order_by: typing.Optional[str]. Sort order for the results.

    Returns:
    FileQueryResponse. A paginated page of matching files.

    Raises:
    UnprocessableEntityError on HTTP 422; ApiError on any other non-2xx response.
    ---
    from llama_cloud import FileFilter
    from llama_cloud.client import LlamaCloud

    client = LlamaCloud(
        token="YOUR_TOKEN",
    )
    client.beta.query_files(
        filter=FileFilter(),
    )
    """
    # OMIT is a sentinel distinguishing "not provided" from an explicit None,
    # so only caller-supplied fields appear in the request body.
    _request: typing.Dict[str, typing.Any] = {}
    if page_size is not OMIT:
        _request["page_size"] = page_size
    if page_token is not OMIT:
        _request["page_token"] = page_token
    if filter is not OMIT:
        _request["filter"] = filter
    if order_by is not OMIT:
        _request["order_by"] = order_by
    _response = self._client_wrapper.httpx_client.request(
        "POST",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files/query"),
        params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
        json=jsonable_encoder(_request),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    if 200 <= _response.status_code < 300:
        return pydantic.parse_obj_as(FileQueryResponse, _response.json())  # type: ignore
    if _response.status_code == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _response_json = _response.json()
    except JSONDecodeError:
        # Non-JSON error body: surface the raw text instead.
        raise ApiError(status_code=_response.status_code, body=_response.text)
    raise ApiError(status_code=_response.status_code, body=_response_json)
797
+
798
def delete_file(
    self, file_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
) -> None:
    """
    Delete a single file from the project via DELETE /api/v1/beta/files/{file_id}.

    Parameters:
    - file_id: str. The ID of the file to delete (path parameter).

    - project_id: typing.Optional[str]. Project scope; sent as a query parameter
      and omitted when None.

    - organization_id: typing.Optional[str]. Organization scope; sent as a query
      parameter and omitted when None.

    Returns:
    None. The API responds 204 No Content on success.

    Raises:
    UnprocessableEntityError on HTTP 422; ApiError on any other non-2xx response.
    ---
    from llama_cloud.client import LlamaCloud

    client = LlamaCloud(
        token="YOUR_TOKEN",
    )
    client.beta.delete_file(
        file_id="string",
    )
    """
    _response = self._client_wrapper.httpx_client.request(
        "DELETE",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/files/{file_id}"),
        params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    if 200 <= _response.status_code < 300:
        return
    if _response.status_code == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _response_json = _response.json()
    except JSONDecodeError:
        # Non-JSON error body: surface the raw text instead.
        raise ApiError(status_code=_response.status_code, body=_response.text)
    raise ApiError(status_code=_response.status_code, body=_response_json)
844
+
613
845
 
614
846
  class AsyncBetaClient:
615
847
  def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -1186,3 +1418,231 @@ class AsyncBetaClient:
1186
1418
  except JSONDecodeError:
1187
1419
  raise ApiError(status_code=_response.status_code, body=_response.text)
1188
1420
  raise ApiError(status_code=_response.status_code, body=_response_json)
1421
+
1422
async def create_file(
    self,
    *,
    project_id: typing.Optional[str] = None,
    organization_id: typing.Optional[str] = None,
    request: FileCreate,
) -> File:
    """
    Create a new file in the project via POST /api/v1/beta/files (async).

    Parameters:
    - project_id: typing.Optional[str]. Project scope; sent as a query parameter
      and omitted when None.

    - organization_id: typing.Optional[str]. Organization scope; sent as a query
      parameter and omitted when None.

    - request: FileCreate. The file creation payload (request body).

    Returns:
    File. The created file as returned by the API.

    Raises:
    UnprocessableEntityError on HTTP 422; ApiError on any other non-2xx response.
    ---
    from llama_cloud import FileCreate
    from llama_cloud.client import AsyncLlamaCloud

    client = AsyncLlamaCloud(
        token="YOUR_TOKEN",
    )
    await client.beta.create_file(
        request=FileCreate(
            name="string",
        ),
    )
    """
    _response = await self._client_wrapper.httpx_client.request(
        "POST",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
        # None-valued params are dropped so the server sees only explicit scopes.
        params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
        json=jsonable_encoder(request),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    if 200 <= _response.status_code < 300:
        return pydantic.parse_obj_as(File, _response.json())  # type: ignore
    if _response.status_code == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _response_json = _response.json()
    except JSONDecodeError:
        # Non-JSON error body: surface the raw text instead.
        raise ApiError(status_code=_response.status_code, body=_response.text)
    raise ApiError(status_code=_response.status_code, body=_response_json)
1476
+
1477
async def upsert_file(
    self,
    *,
    project_id: typing.Optional[str] = None,
    organization_id: typing.Optional[str] = None,
    request: FileCreate,
) -> File:
    """
    Upsert a file (create it, or update it if it already exists) via
    PUT /api/v1/beta/files (async).

    Parameters:
    - project_id: typing.Optional[str]. Project scope; sent as a query parameter
      and omitted when None.

    - organization_id: typing.Optional[str]. Organization scope; sent as a query
      parameter and omitted when None.

    - request: FileCreate. The file creation/update payload (request body).

    Returns:
    File. The upserted file as returned by the API.

    Raises:
    UnprocessableEntityError on HTTP 422; ApiError on any other non-2xx response.
    ---
    from llama_cloud import FileCreate
    from llama_cloud.client import AsyncLlamaCloud

    client = AsyncLlamaCloud(
        token="YOUR_TOKEN",
    )
    await client.beta.upsert_file(
        request=FileCreate(
            name="string",
        ),
    )
    """
    _response = await self._client_wrapper.httpx_client.request(
        "PUT",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
        # None-valued params are dropped so the server sees only explicit scopes.
        params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
        json=jsonable_encoder(request),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    if 200 <= _response.status_code < 300:
        return pydantic.parse_obj_as(File, _response.json())  # type: ignore
    if _response.status_code == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _response_json = _response.json()
    except JSONDecodeError:
        # Non-JSON error body: surface the raw text instead.
        raise ApiError(status_code=_response.status_code, body=_response.text)
    raise ApiError(status_code=_response.status_code, body=_response_json)
1531
+
1532
async def query_files(
    self,
    *,
    project_id: typing.Optional[str] = None,
    organization_id: typing.Optional[str] = None,
    page_size: typing.Optional[int] = OMIT,
    page_token: typing.Optional[str] = OMIT,
    filter: typing.Optional[FileFilter] = OMIT,
    order_by: typing.Optional[str] = OMIT,
) -> FileQueryResponse:
    """
    Query files with flexible filtering and pagination via
    POST /api/v1/beta/files/query (async).

    Parameters:
    - project_id: typing.Optional[str]. Project scope; sent as a query parameter
      and omitted when None.

    - organization_id: typing.Optional[str]. Organization scope; sent as a query
      parameter and omitted when None.

    - page_size: typing.Optional[int]. Maximum number of files per page.

    - page_token: typing.Optional[str]. Continuation token from a prior response.

    - filter: typing.Optional[FileFilter]. Filter criteria for the query.

    - order_by: typing.Optional[str]. Sort order for the results.

    Returns:
    FileQueryResponse. A paginated page of matching files.

    Raises:
    UnprocessableEntityError on HTTP 422; ApiError on any other non-2xx response.
    ---
    from llama_cloud import FileFilter
    from llama_cloud.client import AsyncLlamaCloud

    client = AsyncLlamaCloud(
        token="YOUR_TOKEN",
    )
    await client.beta.query_files(
        filter=FileFilter(),
    )
    """
    # OMIT is a sentinel distinguishing "not provided" from an explicit None,
    # so only caller-supplied fields appear in the request body.
    _request: typing.Dict[str, typing.Any] = {}
    if page_size is not OMIT:
        _request["page_size"] = page_size
    if page_token is not OMIT:
        _request["page_token"] = page_token
    if filter is not OMIT:
        _request["filter"] = filter
    if order_by is not OMIT:
        _request["order_by"] = order_by
    _response = await self._client_wrapper.httpx_client.request(
        "POST",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files/query"),
        params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
        json=jsonable_encoder(_request),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    if 200 <= _response.status_code < 300:
        return pydantic.parse_obj_as(FileQueryResponse, _response.json())  # type: ignore
    if _response.status_code == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _response_json = _response.json()
    except JSONDecodeError:
        # Non-JSON error body: surface the raw text instead.
        raise ApiError(status_code=_response.status_code, body=_response.text)
    raise ApiError(status_code=_response.status_code, body=_response_json)
1602
+
1603
async def delete_file(
    self, file_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
) -> None:
    """
    Delete a single file from the project via DELETE /api/v1/beta/files/{file_id}
    (async).

    Parameters:
    - file_id: str. The ID of the file to delete (path parameter).

    - project_id: typing.Optional[str]. Project scope; sent as a query parameter
      and omitted when None.

    - organization_id: typing.Optional[str]. Organization scope; sent as a query
      parameter and omitted when None.

    Returns:
    None. The API responds 204 No Content on success.

    Raises:
    UnprocessableEntityError on HTTP 422; ApiError on any other non-2xx response.
    ---
    from llama_cloud.client import AsyncLlamaCloud

    client = AsyncLlamaCloud(
        token="YOUR_TOKEN",
    )
    await client.beta.delete_file(
        file_id="string",
    )
    """
    _response = await self._client_wrapper.httpx_client.request(
        "DELETE",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/files/{file_id}"),
        params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    if 200 <= _response.status_code < 300:
        return
    if _response.status_code == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _response_json = _response.json()
    except JSONDecodeError:
        # Non-JSON error body: surface the raw text instead.
        raise ApiError(status_code=_response.status_code, body=_response.text)
    raise ApiError(status_code=_response.status_code, body=_response_json)