llama-cloud 0.1.36__py3-none-any.whl → 0.1.37__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of llama-cloud might be problematic. Click here for more details.

llama_cloud/__init__.py CHANGED
@@ -141,10 +141,15 @@ from .types import (
141
141
  File,
142
142
  FileClassification,
143
143
  FileCountByStatusResponse,
144
+ FileCreate,
145
+ FileCreatePermissionInfoValue,
146
+ FileCreateResourceInfoValue,
144
147
  FileData,
148
+ FileFilter,
145
149
  FileIdPresignedUrl,
146
150
  FileParsePublic,
147
151
  FilePermissionInfoValue,
152
+ FileQueryResponse,
148
153
  FileResourceInfoValue,
149
154
  FileStoreInfoResponse,
150
155
  FileStoreInfoResponseStatus,
@@ -184,6 +189,8 @@ from .types import (
184
189
  LLamaParseTransformConfig,
185
190
  LegacyParseJobConfig,
186
191
  LicenseInfoResponse,
192
+ LlamaExtractModeAvailability,
193
+ LlamaExtractModeAvailabilityStatus,
187
194
  LlamaExtractSettings,
188
195
  LlamaIndexCoreBaseLlmsTypesChatMessage,
189
196
  LlamaIndexCoreBaseLlmsTypesChatMessageBlocksItem,
@@ -392,8 +399,6 @@ from .resources import (
392
399
  ExtractStatelessRequestDataSchema,
393
400
  ExtractStatelessRequestDataSchemaZeroValue,
394
401
  FileCreateFromUrlResourceInfoValue,
395
- FileCreatePermissionInfoValue,
396
- FileCreateResourceInfoValue,
397
402
  PipelineFileUpdateCustomMetadataValue,
398
403
  PipelineUpdateEmbeddingConfig,
399
404
  PipelineUpdateEmbeddingConfig_AzureEmbedding,
@@ -590,13 +595,16 @@ __all__ = [
590
595
  "File",
591
596
  "FileClassification",
592
597
  "FileCountByStatusResponse",
598
+ "FileCreate",
593
599
  "FileCreateFromUrlResourceInfoValue",
594
600
  "FileCreatePermissionInfoValue",
595
601
  "FileCreateResourceInfoValue",
596
602
  "FileData",
603
+ "FileFilter",
597
604
  "FileIdPresignedUrl",
598
605
  "FileParsePublic",
599
606
  "FilePermissionInfoValue",
607
+ "FileQueryResponse",
600
608
  "FileResourceInfoValue",
601
609
  "FileStoreInfoResponse",
602
610
  "FileStoreInfoResponseStatus",
@@ -637,6 +645,8 @@ __all__ = [
637
645
  "LegacyParseJobConfig",
638
646
  "LicenseInfoResponse",
639
647
  "LlamaCloudEnvironment",
648
+ "LlamaExtractModeAvailability",
649
+ "LlamaExtractModeAvailabilityStatus",
640
650
  "LlamaExtractSettings",
641
651
  "LlamaIndexCoreBaseLlmsTypesChatMessage",
642
652
  "LlamaIndexCoreBaseLlmsTypesChatMessageBlocksItem",
@@ -33,7 +33,7 @@ from .embedding_model_configs import (
33
33
  EmbeddingModelConfigCreateEmbeddingConfig_OpenaiEmbedding,
34
34
  EmbeddingModelConfigCreateEmbeddingConfig_VertexaiEmbedding,
35
35
  )
36
- from .files import FileCreateFromUrlResourceInfoValue, FileCreatePermissionInfoValue, FileCreateResourceInfoValue
36
+ from .files import FileCreateFromUrlResourceInfoValue
37
37
  from .llama_extract import (
38
38
  ExtractAgentCreateDataSchema,
39
39
  ExtractAgentCreateDataSchemaZeroValue,
@@ -84,8 +84,6 @@ __all__ = [
84
84
  "ExtractStatelessRequestDataSchema",
85
85
  "ExtractStatelessRequestDataSchemaZeroValue",
86
86
  "FileCreateFromUrlResourceInfoValue",
87
- "FileCreatePermissionInfoValue",
88
- "FileCreateResourceInfoValue",
89
87
  "PipelineFileUpdateCustomMetadataValue",
90
88
  "PipelineUpdateEmbeddingConfig",
91
89
  "PipelineUpdateEmbeddingConfig_AzureEmbedding",
@@ -11,6 +11,7 @@ from ...errors.unprocessable_entity_error import UnprocessableEntityError
11
11
  from ...types.file_store_info_response import FileStoreInfoResponse
12
12
  from ...types.http_validation_error import HttpValidationError
13
13
  from ...types.license_info_response import LicenseInfoResponse
14
+ from ...types.llama_extract_mode_availability import LlamaExtractModeAvailability
14
15
 
15
16
  try:
16
17
  import pydantic
@@ -79,6 +80,34 @@ class AdminClient:
79
80
  raise ApiError(status_code=_response.status_code, body=_response.text)
80
81
  raise ApiError(status_code=_response.status_code, body=_response_json)
81
82
 
83
+ def get_llamaextract_features(self) -> typing.List[LlamaExtractModeAvailability]:
84
+ """
85
+ Get LlamaExtract feature availability based on available models.
86
+
87
+ ---
88
+ from llama_cloud.client import LlamaCloud
89
+
90
+ client = LlamaCloud(
91
+ token="YOUR_TOKEN",
92
+ )
93
+ client.admin.get_llamaextract_features()
94
+ """
95
+ _response = self._client_wrapper.httpx_client.request(
96
+ "GET",
97
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/admin/llamaextract/features"),
98
+ headers=self._client_wrapper.get_headers(),
99
+ timeout=60,
100
+ )
101
+ if 200 <= _response.status_code < 300:
102
+ return pydantic.parse_obj_as(typing.List[LlamaExtractModeAvailability], _response.json()) # type: ignore
103
+ if _response.status_code == 422:
104
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
105
+ try:
106
+ _response_json = _response.json()
107
+ except JSONDecodeError:
108
+ raise ApiError(status_code=_response.status_code, body=_response.text)
109
+ raise ApiError(status_code=_response.status_code, body=_response_json)
110
+
82
111
 
83
112
  class AsyncAdminClient:
84
113
  def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -137,3 +166,31 @@ class AsyncAdminClient:
137
166
  except JSONDecodeError:
138
167
  raise ApiError(status_code=_response.status_code, body=_response.text)
139
168
  raise ApiError(status_code=_response.status_code, body=_response_json)
169
+
170
+ async def get_llamaextract_features(self) -> typing.List[LlamaExtractModeAvailability]:
171
+ """
172
+ Get LlamaExtract feature availability based on available models.
173
+
174
+ ---
175
+ from llama_cloud.client import AsyncLlamaCloud
176
+
177
+ client = AsyncLlamaCloud(
178
+ token="YOUR_TOKEN",
179
+ )
180
+ await client.admin.get_llamaextract_features()
181
+ """
182
+ _response = await self._client_wrapper.httpx_client.request(
183
+ "GET",
184
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/admin/llamaextract/features"),
185
+ headers=self._client_wrapper.get_headers(),
186
+ timeout=60,
187
+ )
188
+ if 200 <= _response.status_code < 300:
189
+ return pydantic.parse_obj_as(typing.List[LlamaExtractModeAvailability], _response.json()) # type: ignore
190
+ if _response.status_code == 422:
191
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
192
+ try:
193
+ _response_json = _response.json()
194
+ except JSONDecodeError:
195
+ raise ApiError(status_code=_response.status_code, body=_response.text)
196
+ raise ApiError(status_code=_response.status_code, body=_response_json)
@@ -15,6 +15,10 @@ from ...types.agent_data import AgentData
15
15
  from ...types.batch import Batch
16
16
  from ...types.batch_paginated_list import BatchPaginatedList
17
17
  from ...types.batch_public_output import BatchPublicOutput
18
+ from ...types.file import File
19
+ from ...types.file_create import FileCreate
20
+ from ...types.file_filter import FileFilter
21
+ from ...types.file_query_response import FileQueryResponse
18
22
  from ...types.filter_operation import FilterOperation
19
23
  from ...types.http_validation_error import HttpValidationError
20
24
  from ...types.llama_parse_parameters import LlamaParseParameters
@@ -610,6 +614,234 @@ class BetaClient:
610
614
  raise ApiError(status_code=_response.status_code, body=_response.text)
611
615
  raise ApiError(status_code=_response.status_code, body=_response_json)
612
616
 
617
+ def create_file(
618
+ self,
619
+ *,
620
+ project_id: typing.Optional[str] = None,
621
+ organization_id: typing.Optional[str] = None,
622
+ request: FileCreate,
623
+ ) -> File:
624
+ """
625
+ Create a new file in the project.
626
+
627
+ Args:
628
+ file_create: File creation data
629
+ project: Validated project from dependency
630
+ db: Database session
631
+
632
+ Returns:
633
+ The created file
634
+
635
+ Parameters:
636
+ - project_id: typing.Optional[str].
637
+
638
+ - organization_id: typing.Optional[str].
639
+
640
+ - request: FileCreate.
641
+ ---
642
+ from llama_cloud import FileCreate
643
+ from llama_cloud.client import LlamaCloud
644
+
645
+ client = LlamaCloud(
646
+ token="YOUR_TOKEN",
647
+ )
648
+ client.beta.create_file(
649
+ request=FileCreate(
650
+ name="string",
651
+ ),
652
+ )
653
+ """
654
+ _response = self._client_wrapper.httpx_client.request(
655
+ "POST",
656
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
657
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
658
+ json=jsonable_encoder(request),
659
+ headers=self._client_wrapper.get_headers(),
660
+ timeout=60,
661
+ )
662
+ if 200 <= _response.status_code < 300:
663
+ return pydantic.parse_obj_as(File, _response.json()) # type: ignore
664
+ if _response.status_code == 422:
665
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
666
+ try:
667
+ _response_json = _response.json()
668
+ except JSONDecodeError:
669
+ raise ApiError(status_code=_response.status_code, body=_response.text)
670
+ raise ApiError(status_code=_response.status_code, body=_response_json)
671
+
672
+ def upsert_file(
673
+ self,
674
+ *,
675
+ project_id: typing.Optional[str] = None,
676
+ organization_id: typing.Optional[str] = None,
677
+ request: FileCreate,
678
+ ) -> File:
679
+ """
680
+ Upsert a file (create or update if exists) in the project.
681
+
682
+ Args:
683
+ file_create: File creation/update data
684
+ project: Validated project from dependency
685
+ db: Database session
686
+
687
+ Returns:
688
+ The upserted file
689
+
690
+ Parameters:
691
+ - project_id: typing.Optional[str].
692
+
693
+ - organization_id: typing.Optional[str].
694
+
695
+ - request: FileCreate.
696
+ ---
697
+ from llama_cloud import FileCreate
698
+ from llama_cloud.client import LlamaCloud
699
+
700
+ client = LlamaCloud(
701
+ token="YOUR_TOKEN",
702
+ )
703
+ client.beta.upsert_file(
704
+ request=FileCreate(
705
+ name="string",
706
+ ),
707
+ )
708
+ """
709
+ _response = self._client_wrapper.httpx_client.request(
710
+ "PUT",
711
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
712
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
713
+ json=jsonable_encoder(request),
714
+ headers=self._client_wrapper.get_headers(),
715
+ timeout=60,
716
+ )
717
+ if 200 <= _response.status_code < 300:
718
+ return pydantic.parse_obj_as(File, _response.json()) # type: ignore
719
+ if _response.status_code == 422:
720
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
721
+ try:
722
+ _response_json = _response.json()
723
+ except JSONDecodeError:
724
+ raise ApiError(status_code=_response.status_code, body=_response.text)
725
+ raise ApiError(status_code=_response.status_code, body=_response_json)
726
+
727
+ def query_files(
728
+ self,
729
+ *,
730
+ project_id: typing.Optional[str] = None,
731
+ organization_id: typing.Optional[str] = None,
732
+ page_size: typing.Optional[int] = OMIT,
733
+ page_token: typing.Optional[str] = OMIT,
734
+ filter: typing.Optional[FileFilter] = OMIT,
735
+ order_by: typing.Optional[str] = OMIT,
736
+ ) -> FileQueryResponse:
737
+ """
738
+ Query files with flexible filtering and pagination.
739
+
740
+ Args:
741
+ request: The query request with filters and pagination
742
+ project: Validated project from dependency
743
+ db: Database session
744
+
745
+ Returns:
746
+ Paginated response with files
747
+
748
+ Parameters:
749
+ - project_id: typing.Optional[str].
750
+
751
+ - organization_id: typing.Optional[str].
752
+
753
+ - page_size: typing.Optional[int].
754
+
755
+ - page_token: typing.Optional[str].
756
+
757
+ - filter: typing.Optional[FileFilter].
758
+
759
+ - order_by: typing.Optional[str].
760
+ ---
761
+ from llama_cloud import FileFilter
762
+ from llama_cloud.client import LlamaCloud
763
+
764
+ client = LlamaCloud(
765
+ token="YOUR_TOKEN",
766
+ )
767
+ client.beta.query_files(
768
+ filter=FileFilter(),
769
+ )
770
+ """
771
+ _request: typing.Dict[str, typing.Any] = {}
772
+ if page_size is not OMIT:
773
+ _request["page_size"] = page_size
774
+ if page_token is not OMIT:
775
+ _request["page_token"] = page_token
776
+ if filter is not OMIT:
777
+ _request["filter"] = filter
778
+ if order_by is not OMIT:
779
+ _request["order_by"] = order_by
780
+ _response = self._client_wrapper.httpx_client.request(
781
+ "POST",
782
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files/query"),
783
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
784
+ json=jsonable_encoder(_request),
785
+ headers=self._client_wrapper.get_headers(),
786
+ timeout=60,
787
+ )
788
+ if 200 <= _response.status_code < 300:
789
+ return pydantic.parse_obj_as(FileQueryResponse, _response.json()) # type: ignore
790
+ if _response.status_code == 422:
791
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
792
+ try:
793
+ _response_json = _response.json()
794
+ except JSONDecodeError:
795
+ raise ApiError(status_code=_response.status_code, body=_response.text)
796
+ raise ApiError(status_code=_response.status_code, body=_response_json)
797
+
798
+ def delete_file(
799
+ self, file_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
800
+ ) -> None:
801
+ """
802
+ Delete a single file from the project.
803
+
804
+ Args:
805
+ file_id: The ID of the file to delete
806
+ project: Validated project from dependency
807
+ db: Database session
808
+
809
+ Returns:
810
+ None (204 No Content on success)
811
+
812
+ Parameters:
813
+ - file_id: str.
814
+
815
+ - project_id: typing.Optional[str].
816
+
817
+ - organization_id: typing.Optional[str].
818
+ ---
819
+ from llama_cloud.client import LlamaCloud
820
+
821
+ client = LlamaCloud(
822
+ token="YOUR_TOKEN",
823
+ )
824
+ client.beta.delete_file(
825
+ file_id="string",
826
+ )
827
+ """
828
+ _response = self._client_wrapper.httpx_client.request(
829
+ "DELETE",
830
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/files/{file_id}"),
831
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
832
+ headers=self._client_wrapper.get_headers(),
833
+ timeout=60,
834
+ )
835
+ if 200 <= _response.status_code < 300:
836
+ return
837
+ if _response.status_code == 422:
838
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
839
+ try:
840
+ _response_json = _response.json()
841
+ except JSONDecodeError:
842
+ raise ApiError(status_code=_response.status_code, body=_response.text)
843
+ raise ApiError(status_code=_response.status_code, body=_response_json)
844
+
613
845
 
614
846
  class AsyncBetaClient:
615
847
  def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -1186,3 +1418,231 @@ class AsyncBetaClient:
1186
1418
  except JSONDecodeError:
1187
1419
  raise ApiError(status_code=_response.status_code, body=_response.text)
1188
1420
  raise ApiError(status_code=_response.status_code, body=_response_json)
1421
+
1422
+ async def create_file(
1423
+ self,
1424
+ *,
1425
+ project_id: typing.Optional[str] = None,
1426
+ organization_id: typing.Optional[str] = None,
1427
+ request: FileCreate,
1428
+ ) -> File:
1429
+ """
1430
+ Create a new file in the project.
1431
+
1432
+ Args:
1433
+ file_create: File creation data
1434
+ project: Validated project from dependency
1435
+ db: Database session
1436
+
1437
+ Returns:
1438
+ The created file
1439
+
1440
+ Parameters:
1441
+ - project_id: typing.Optional[str].
1442
+
1443
+ - organization_id: typing.Optional[str].
1444
+
1445
+ - request: FileCreate.
1446
+ ---
1447
+ from llama_cloud import FileCreate
1448
+ from llama_cloud.client import AsyncLlamaCloud
1449
+
1450
+ client = AsyncLlamaCloud(
1451
+ token="YOUR_TOKEN",
1452
+ )
1453
+ await client.beta.create_file(
1454
+ request=FileCreate(
1455
+ name="string",
1456
+ ),
1457
+ )
1458
+ """
1459
+ _response = await self._client_wrapper.httpx_client.request(
1460
+ "POST",
1461
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
1462
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1463
+ json=jsonable_encoder(request),
1464
+ headers=self._client_wrapper.get_headers(),
1465
+ timeout=60,
1466
+ )
1467
+ if 200 <= _response.status_code < 300:
1468
+ return pydantic.parse_obj_as(File, _response.json()) # type: ignore
1469
+ if _response.status_code == 422:
1470
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1471
+ try:
1472
+ _response_json = _response.json()
1473
+ except JSONDecodeError:
1474
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1475
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1476
+
1477
+ async def upsert_file(
1478
+ self,
1479
+ *,
1480
+ project_id: typing.Optional[str] = None,
1481
+ organization_id: typing.Optional[str] = None,
1482
+ request: FileCreate,
1483
+ ) -> File:
1484
+ """
1485
+ Upsert a file (create or update if exists) in the project.
1486
+
1487
+ Args:
1488
+ file_create: File creation/update data
1489
+ project: Validated project from dependency
1490
+ db: Database session
1491
+
1492
+ Returns:
1493
+ The upserted file
1494
+
1495
+ Parameters:
1496
+ - project_id: typing.Optional[str].
1497
+
1498
+ - organization_id: typing.Optional[str].
1499
+
1500
+ - request: FileCreate.
1501
+ ---
1502
+ from llama_cloud import FileCreate
1503
+ from llama_cloud.client import AsyncLlamaCloud
1504
+
1505
+ client = AsyncLlamaCloud(
1506
+ token="YOUR_TOKEN",
1507
+ )
1508
+ await client.beta.upsert_file(
1509
+ request=FileCreate(
1510
+ name="string",
1511
+ ),
1512
+ )
1513
+ """
1514
+ _response = await self._client_wrapper.httpx_client.request(
1515
+ "PUT",
1516
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
1517
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1518
+ json=jsonable_encoder(request),
1519
+ headers=self._client_wrapper.get_headers(),
1520
+ timeout=60,
1521
+ )
1522
+ if 200 <= _response.status_code < 300:
1523
+ return pydantic.parse_obj_as(File, _response.json()) # type: ignore
1524
+ if _response.status_code == 422:
1525
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1526
+ try:
1527
+ _response_json = _response.json()
1528
+ except JSONDecodeError:
1529
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1530
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1531
+
1532
+ async def query_files(
1533
+ self,
1534
+ *,
1535
+ project_id: typing.Optional[str] = None,
1536
+ organization_id: typing.Optional[str] = None,
1537
+ page_size: typing.Optional[int] = OMIT,
1538
+ page_token: typing.Optional[str] = OMIT,
1539
+ filter: typing.Optional[FileFilter] = OMIT,
1540
+ order_by: typing.Optional[str] = OMIT,
1541
+ ) -> FileQueryResponse:
1542
+ """
1543
+ Query files with flexible filtering and pagination.
1544
+
1545
+ Args:
1546
+ request: The query request with filters and pagination
1547
+ project: Validated project from dependency
1548
+ db: Database session
1549
+
1550
+ Returns:
1551
+ Paginated response with files
1552
+
1553
+ Parameters:
1554
+ - project_id: typing.Optional[str].
1555
+
1556
+ - organization_id: typing.Optional[str].
1557
+
1558
+ - page_size: typing.Optional[int].
1559
+
1560
+ - page_token: typing.Optional[str].
1561
+
1562
+ - filter: typing.Optional[FileFilter].
1563
+
1564
+ - order_by: typing.Optional[str].
1565
+ ---
1566
+ from llama_cloud import FileFilter
1567
+ from llama_cloud.client import AsyncLlamaCloud
1568
+
1569
+ client = AsyncLlamaCloud(
1570
+ token="YOUR_TOKEN",
1571
+ )
1572
+ await client.beta.query_files(
1573
+ filter=FileFilter(),
1574
+ )
1575
+ """
1576
+ _request: typing.Dict[str, typing.Any] = {}
1577
+ if page_size is not OMIT:
1578
+ _request["page_size"] = page_size
1579
+ if page_token is not OMIT:
1580
+ _request["page_token"] = page_token
1581
+ if filter is not OMIT:
1582
+ _request["filter"] = filter
1583
+ if order_by is not OMIT:
1584
+ _request["order_by"] = order_by
1585
+ _response = await self._client_wrapper.httpx_client.request(
1586
+ "POST",
1587
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files/query"),
1588
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1589
+ json=jsonable_encoder(_request),
1590
+ headers=self._client_wrapper.get_headers(),
1591
+ timeout=60,
1592
+ )
1593
+ if 200 <= _response.status_code < 300:
1594
+ return pydantic.parse_obj_as(FileQueryResponse, _response.json()) # type: ignore
1595
+ if _response.status_code == 422:
1596
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1597
+ try:
1598
+ _response_json = _response.json()
1599
+ except JSONDecodeError:
1600
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1601
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1602
+
1603
+ async def delete_file(
1604
+ self, file_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
1605
+ ) -> None:
1606
+ """
1607
+ Delete a single file from the project.
1608
+
1609
+ Args:
1610
+ file_id: The ID of the file to delete
1611
+ project: Validated project from dependency
1612
+ db: Database session
1613
+
1614
+ Returns:
1615
+ None (204 No Content on success)
1616
+
1617
+ Parameters:
1618
+ - file_id: str.
1619
+
1620
+ - project_id: typing.Optional[str].
1621
+
1622
+ - organization_id: typing.Optional[str].
1623
+ ---
1624
+ from llama_cloud.client import AsyncLlamaCloud
1625
+
1626
+ client = AsyncLlamaCloud(
1627
+ token="YOUR_TOKEN",
1628
+ )
1629
+ await client.beta.delete_file(
1630
+ file_id="string",
1631
+ )
1632
+ """
1633
+ _response = await self._client_wrapper.httpx_client.request(
1634
+ "DELETE",
1635
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/files/{file_id}"),
1636
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
1637
+ headers=self._client_wrapper.get_headers(),
1638
+ timeout=60,
1639
+ )
1640
+ if 200 <= _response.status_code < 300:
1641
+ return
1642
+ if _response.status_code == 422:
1643
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1644
+ try:
1645
+ _response_json = _response.json()
1646
+ except JSONDecodeError:
1647
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1648
+ raise ApiError(status_code=_response.status_code, body=_response_json)
@@ -1,5 +1,5 @@
1
1
  # This file was auto-generated by Fern from our API Definition.
2
2
 
3
- from .types import FileCreateFromUrlResourceInfoValue, FileCreatePermissionInfoValue, FileCreateResourceInfoValue
3
+ from .types import FileCreateFromUrlResourceInfoValue
4
4
 
5
- __all__ = ["FileCreateFromUrlResourceInfoValue", "FileCreatePermissionInfoValue", "FileCreateResourceInfoValue"]
5
+ __all__ = ["FileCreateFromUrlResourceInfoValue"]
@@ -1,6 +1,5 @@
1
1
  # This file was auto-generated by Fern from our API Definition.
2
2
 
3
- import datetime as dt
4
3
  import typing
5
4
  import urllib.parse
6
5
  from json.decoder import JSONDecodeError
@@ -11,14 +10,13 @@ from ...core.jsonable_encoder import jsonable_encoder
11
10
  from ...core.remove_none_from_dict import remove_none_from_dict
12
11
  from ...errors.unprocessable_entity_error import UnprocessableEntityError
13
12
  from ...types.file import File
13
+ from ...types.file_create import FileCreate
14
14
  from ...types.file_id_presigned_url import FileIdPresignedUrl
15
15
  from ...types.http_validation_error import HttpValidationError
16
16
  from ...types.page_figure_metadata import PageFigureMetadata
17
17
  from ...types.page_screenshot_metadata import PageScreenshotMetadata
18
18
  from ...types.presigned_url import PresignedUrl
19
19
  from .types.file_create_from_url_resource_info_value import FileCreateFromUrlResourceInfoValue
20
- from .types.file_create_permission_info_value import FileCreatePermissionInfoValue
21
- from .types.file_create_resource_info_value import FileCreateResourceInfoValue
22
20
 
23
21
  try:
24
22
  import pydantic
@@ -196,13 +194,7 @@ class FilesClient:
196
194
  expires_at_seconds: typing.Optional[int] = None,
197
195
  project_id: typing.Optional[str] = None,
198
196
  organization_id: typing.Optional[str] = None,
199
- name: str,
200
- external_file_id: typing.Optional[str] = OMIT,
201
- file_size: typing.Optional[int] = OMIT,
202
- last_modified_at: typing.Optional[dt.datetime] = OMIT,
203
- resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateResourceInfoValue]]] = OMIT,
204
- permission_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreatePermissionInfoValue]]] = OMIT,
205
- data_source_id: typing.Optional[str] = OMIT,
197
+ request: FileCreate,
206
198
  ) -> FileIdPresignedUrl:
207
199
  """
208
200
  Create a presigned url for uploading a file.
@@ -217,49 +209,27 @@ class FilesClient:
217
209
 
218
210
  - organization_id: typing.Optional[str].
219
211
 
220
- - name: str. Name that will be used for created file. If possible, always include the file extension in the name.
221
-
222
- - external_file_id: typing.Optional[str].
223
-
224
- - file_size: typing.Optional[int].
225
-
226
- - last_modified_at: typing.Optional[dt.datetime].
227
-
228
- - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateResourceInfoValue]]].
229
-
230
- - permission_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreatePermissionInfoValue]]].
231
-
232
- - data_source_id: typing.Optional[str].
212
+ - request: FileCreate.
233
213
  ---
214
+ from llama_cloud import FileCreate
234
215
  from llama_cloud.client import LlamaCloud
235
216
 
236
217
  client = LlamaCloud(
237
218
  token="YOUR_TOKEN",
238
219
  )
239
220
  client.files.generate_presigned_url(
240
- name="string",
221
+ request=FileCreate(
222
+ name="string",
223
+ ),
241
224
  )
242
225
  """
243
- _request: typing.Dict[str, typing.Any] = {"name": name}
244
- if external_file_id is not OMIT:
245
- _request["external_file_id"] = external_file_id
246
- if file_size is not OMIT:
247
- _request["file_size"] = file_size
248
- if last_modified_at is not OMIT:
249
- _request["last_modified_at"] = last_modified_at
250
- if resource_info is not OMIT:
251
- _request["resource_info"] = resource_info
252
- if permission_info is not OMIT:
253
- _request["permission_info"] = permission_info
254
- if data_source_id is not OMIT:
255
- _request["data_source_id"] = data_source_id
256
226
  _response = self._client_wrapper.httpx_client.request(
257
227
  "PUT",
258
228
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
259
229
  params=remove_none_from_dict(
260
230
  {"expires_at_seconds": expires_at_seconds, "project_id": project_id, "organization_id": organization_id}
261
231
  ),
262
- json=jsonable_encoder(_request),
232
+ json=jsonable_encoder(request),
263
233
  headers=self._client_wrapper.get_headers(),
264
234
  timeout=60,
265
235
  )
@@ -936,13 +906,7 @@ class AsyncFilesClient:
936
906
  expires_at_seconds: typing.Optional[int] = None,
937
907
  project_id: typing.Optional[str] = None,
938
908
  organization_id: typing.Optional[str] = None,
939
- name: str,
940
- external_file_id: typing.Optional[str] = OMIT,
941
- file_size: typing.Optional[int] = OMIT,
942
- last_modified_at: typing.Optional[dt.datetime] = OMIT,
943
- resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateResourceInfoValue]]] = OMIT,
944
- permission_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreatePermissionInfoValue]]] = OMIT,
945
- data_source_id: typing.Optional[str] = OMIT,
909
+ request: FileCreate,
946
910
  ) -> FileIdPresignedUrl:
947
911
  """
948
912
  Create a presigned url for uploading a file.
@@ -957,49 +921,27 @@ class AsyncFilesClient:
957
921
 
958
922
  - organization_id: typing.Optional[str].
959
923
 
960
- - name: str. Name that will be used for created file. If possible, always include the file extension in the name.
961
-
962
- - external_file_id: typing.Optional[str].
963
-
964
- - file_size: typing.Optional[int].
965
-
966
- - last_modified_at: typing.Optional[dt.datetime].
967
-
968
- - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateResourceInfoValue]]].
969
-
970
- - permission_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreatePermissionInfoValue]]].
971
-
972
- - data_source_id: typing.Optional[str].
924
+ - request: FileCreate.
973
925
  ---
926
+ from llama_cloud import FileCreate
974
927
  from llama_cloud.client import AsyncLlamaCloud
975
928
 
976
929
  client = AsyncLlamaCloud(
977
930
  token="YOUR_TOKEN",
978
931
  )
979
932
  await client.files.generate_presigned_url(
980
- name="string",
933
+ request=FileCreate(
934
+ name="string",
935
+ ),
981
936
  )
982
937
  """
983
- _request: typing.Dict[str, typing.Any] = {"name": name}
984
- if external_file_id is not OMIT:
985
- _request["external_file_id"] = external_file_id
986
- if file_size is not OMIT:
987
- _request["file_size"] = file_size
988
- if last_modified_at is not OMIT:
989
- _request["last_modified_at"] = last_modified_at
990
- if resource_info is not OMIT:
991
- _request["resource_info"] = resource_info
992
- if permission_info is not OMIT:
993
- _request["permission_info"] = permission_info
994
- if data_source_id is not OMIT:
995
- _request["data_source_id"] = data_source_id
996
938
  _response = await self._client_wrapper.httpx_client.request(
997
939
  "PUT",
998
940
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
999
941
  params=remove_none_from_dict(
1000
942
  {"expires_at_seconds": expires_at_seconds, "project_id": project_id, "organization_id": organization_id}
1001
943
  ),
1002
- json=jsonable_encoder(_request),
944
+ json=jsonable_encoder(request),
1003
945
  headers=self._client_wrapper.get_headers(),
1004
946
  timeout=60,
1005
947
  )
@@ -1,7 +1,5 @@
1
1
  # This file was auto-generated by Fern from our API Definition.
2
2
 
3
3
  from .file_create_from_url_resource_info_value import FileCreateFromUrlResourceInfoValue
4
- from .file_create_permission_info_value import FileCreatePermissionInfoValue
5
- from .file_create_resource_info_value import FileCreateResourceInfoValue
6
4
 
7
- __all__ = ["FileCreateFromUrlResourceInfoValue", "FileCreatePermissionInfoValue", "FileCreateResourceInfoValue"]
5
+ __all__ = ["FileCreateFromUrlResourceInfoValue"]
@@ -148,10 +148,15 @@ from .failure_handling_config import FailureHandlingConfig
148
148
  from .file import File
149
149
  from .file_classification import FileClassification
150
150
  from .file_count_by_status_response import FileCountByStatusResponse
151
+ from .file_create import FileCreate
152
+ from .file_create_permission_info_value import FileCreatePermissionInfoValue
153
+ from .file_create_resource_info_value import FileCreateResourceInfoValue
151
154
  from .file_data import FileData
155
+ from .file_filter import FileFilter
152
156
  from .file_id_presigned_url import FileIdPresignedUrl
153
157
  from .file_parse_public import FileParsePublic
154
158
  from .file_permission_info_value import FilePermissionInfoValue
159
+ from .file_query_response import FileQueryResponse
155
160
  from .file_resource_info_value import FileResourceInfoValue
156
161
  from .file_store_info_response import FileStoreInfoResponse
157
162
  from .file_store_info_response_status import FileStoreInfoResponseStatus
@@ -193,6 +198,8 @@ from .job_record_with_usage_metrics import JobRecordWithUsageMetrics
193
198
  from .l_lama_parse_transform_config import LLamaParseTransformConfig
194
199
  from .legacy_parse_job_config import LegacyParseJobConfig
195
200
  from .license_info_response import LicenseInfoResponse
201
+ from .llama_extract_mode_availability import LlamaExtractModeAvailability
202
+ from .llama_extract_mode_availability_status import LlamaExtractModeAvailabilityStatus
196
203
  from .llama_extract_settings import LlamaExtractSettings
197
204
  from .llama_index_core_base_llms_types_chat_message import LlamaIndexCoreBaseLlmsTypesChatMessage
198
205
  from .llama_index_core_base_llms_types_chat_message_blocks_item import (
@@ -530,10 +537,15 @@ __all__ = [
530
537
  "File",
531
538
  "FileClassification",
532
539
  "FileCountByStatusResponse",
540
+ "FileCreate",
541
+ "FileCreatePermissionInfoValue",
542
+ "FileCreateResourceInfoValue",
533
543
  "FileData",
544
+ "FileFilter",
534
545
  "FileIdPresignedUrl",
535
546
  "FileParsePublic",
536
547
  "FilePermissionInfoValue",
548
+ "FileQueryResponse",
537
549
  "FileResourceInfoValue",
538
550
  "FileStoreInfoResponse",
539
551
  "FileStoreInfoResponseStatus",
@@ -573,6 +585,8 @@ __all__ = [
573
585
  "LLamaParseTransformConfig",
574
586
  "LegacyParseJobConfig",
575
587
  "LicenseInfoResponse",
588
+ "LlamaExtractModeAvailability",
589
+ "LlamaExtractModeAvailabilityStatus",
576
590
  "LlamaExtractSettings",
577
591
  "LlamaIndexCoreBaseLlmsTypesChatMessage",
578
592
  "LlamaIndexCoreBaseLlmsTypesChatMessageBlocksItem",
@@ -0,0 +1,41 @@
1
# This file was auto-generated by Fern from our API Definition.

import datetime as dt
import typing

from ..core.datetime_utils import serialize_datetime
from .file_create_permission_info_value import FileCreatePermissionInfoValue
from .file_create_resource_info_value import FileCreateResourceInfoValue

# Use pydantic's v1 compatibility layer when a v2 install is detected;
# otherwise fall back to the plain pydantic import (a real v1 install).
try:
    import pydantic

    if pydantic.__version__.startswith("1."):
        raise ImportError
    import pydantic.v1 as pydantic  # type: ignore
except ImportError:
    import pydantic  # type: ignore


class FileCreate(pydantic.BaseModel):
    """Request schema describing a file to be created."""

    name: str = pydantic.Field(
        description="Name that will be used for created file. If possible, always include the file extension in the name."
    )
    external_file_id: typing.Optional[str]
    file_size: typing.Optional[int]
    last_modified_at: typing.Optional[dt.datetime]
    resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateResourceInfoValue]]]
    permission_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreatePermissionInfoValue]]]
    data_source_id: typing.Optional[str]

    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
        """Serialize to a dict, defaulting to field aliases and omitting unset fields."""
        merged: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
        return super().dict(**merged)

    def json(self, **kwargs: typing.Any) -> str:
        """Serialize to JSON, defaulting to field aliases and omitting unset fields."""
        merged: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
        return super().json(**merged)

    class Config:
        frozen = True
        smart_union = True
        json_encoders = {dt.datetime: serialize_datetime}
@@ -0,0 +1,40 @@
1
# This file was auto-generated by Fern from our API Definition.

import datetime as dt
import typing

from ..core.datetime_utils import serialize_datetime

# Use pydantic's v1 compatibility layer when a v2 install is detected;
# otherwise fall back to the plain pydantic import (a real v1 install).
try:
    import pydantic

    if pydantic.__version__.startswith("1."):
        raise ImportError
    import pydantic.v1 as pydantic  # type: ignore
except ImportError:
    import pydantic  # type: ignore


class FileFilter(pydantic.BaseModel):
    """
    Filter parameters for file queries.
    """

    project_id: typing.Optional[str]
    file_ids: typing.Optional[typing.List[str]]
    file_name: typing.Optional[str]
    data_source_id: typing.Optional[str]
    external_file_id: typing.Optional[str]
    only_manually_uploaded: typing.Optional[bool]

    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
        """Serialize to a dict, defaulting to field aliases and omitting unset fields."""
        merged: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
        return super().dict(**merged)

    def json(self, **kwargs: typing.Any) -> str:
        """Serialize to JSON, defaulting to field aliases and omitting unset fields."""
        merged: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
        return super().json(**merged)

    class Config:
        frozen = True
        smart_union = True
        json_encoders = {dt.datetime: serialize_datetime}
@@ -0,0 +1,38 @@
1
# This file was auto-generated by Fern from our API Definition.

import datetime as dt
import typing

from ..core.datetime_utils import serialize_datetime
from .file import File

# Use pydantic's v1 compatibility layer when a v2 install is detected;
# otherwise fall back to the plain pydantic import (a real v1 install).
try:
    import pydantic

    if pydantic.__version__.startswith("1."):
        raise ImportError
    import pydantic.v1 as pydantic  # type: ignore
except ImportError:
    import pydantic  # type: ignore


class FileQueryResponse(pydantic.BaseModel):
    """
    Response schema for paginated file queries.
    """

    items: typing.List[File] = pydantic.Field(description="The list of items.")
    next_page_token: typing.Optional[str]
    total_size: typing.Optional[int]

    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
        """Serialize to a dict, defaulting to field aliases and omitting unset fields."""
        merged: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
        return super().dict(**merged)

    def json(self, **kwargs: typing.Any) -> str:
        """Serialize to JSON, defaulting to field aliases and omitting unset fields."""
        merged: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
        return super().json(**merged)

    class Config:
        frozen = True
        smart_union = True
        json_encoders = {dt.datetime: serialize_datetime}
@@ -0,0 +1,37 @@
1
# This file was auto-generated by Fern from our API Definition.

import datetime as dt
import typing

from ..core.datetime_utils import serialize_datetime
from .llama_extract_mode_availability_status import LlamaExtractModeAvailabilityStatus

# Use pydantic's v1 compatibility layer when a v2 install is detected;
# otherwise fall back to the plain pydantic import (a real v1 install).
try:
    import pydantic

    if pydantic.__version__.startswith("1."):
        raise ImportError
    import pydantic.v1 as pydantic  # type: ignore
except ImportError:
    import pydantic  # type: ignore


class LlamaExtractModeAvailability(pydantic.BaseModel):
    """Describes a LlamaExtract mode together with its availability status."""

    mode: str
    status: LlamaExtractModeAvailabilityStatus
    parse_mode: str
    parse_models: typing.List[str]
    extract_models: typing.List[str]
    missing_models: typing.Optional[typing.List[str]]

    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
        """Serialize to a dict, defaulting to field aliases and omitting unset fields."""
        merged: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
        return super().dict(**merged)

    def json(self, **kwargs: typing.Any) -> str:
        """Serialize to JSON, defaulting to field aliases and omitting unset fields."""
        merged: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
        return super().json(**merged)

    class Config:
        frozen = True
        smart_union = True
        json_encoders = {dt.datetime: serialize_datetime}
@@ -0,0 +1,17 @@
1
# This file was auto-generated by Fern from our API Definition.

import enum
import typing

T_Result = typing.TypeVar("T_Result")


class LlamaExtractModeAvailabilityStatus(str, enum.Enum):
    """Availability status of a LlamaExtract mode."""

    AVAILABLE = "available"
    UNAVAILABLE = "unavailable"

    def visit(self, available: typing.Callable[[], T_Result], unavailable: typing.Callable[[], T_Result]) -> T_Result:
        """Invoke and return the handler corresponding to this status."""
        # Dispatch table keyed by enum member; equivalent to the generated
        # if-chain for every reachable member of the enum.
        handlers: typing.Dict[
            "LlamaExtractModeAvailabilityStatus", typing.Callable[[], T_Result]
        ] = {
            LlamaExtractModeAvailabilityStatus.AVAILABLE: available,
            LlamaExtractModeAvailabilityStatus.UNAVAILABLE: unavailable,
        }
        return handlers[self]()
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.1
1
+ Metadata-Version: 2.3
2
2
  Name: llama-cloud
3
- Version: 0.1.36
3
+ Version: 0.1.37
4
4
  Summary:
5
5
  License: MIT
6
6
  Author: Logan Markewich
@@ -12,6 +12,8 @@ Classifier: Programming Language :: Python :: 3.8
12
12
  Classifier: Programming Language :: Python :: 3.9
13
13
  Classifier: Programming Language :: Python :: 3.10
14
14
  Classifier: Programming Language :: Python :: 3.11
15
+ Classifier: Programming Language :: Python :: 3.12
16
+ Classifier: Programming Language :: Python :: 3.13
15
17
  Requires-Dist: certifi (>=2024.7.4)
16
18
  Requires-Dist: httpx (>=0.20.0)
17
19
  Requires-Dist: pydantic (>=1.10)
@@ -1,4 +1,4 @@
1
- llama_cloud/__init__.py,sha256=ZE5Y7UEItg2hbjMAjjLgdoMaEYCnc478iEHSGwS4HAo,27419
1
+ llama_cloud/__init__.py,sha256=h1hssl79cA7z_YQpPphW_UcHPXLfxyPqV8WqhJwbjUM,27687
2
2
  llama_cloud/client.py,sha256=xIC_pTNYLA3AfLE8esqhrzam93LLo7oc6Vrog64Bwzw,6399
3
3
  llama_cloud/core/__init__.py,sha256=QJS3CJ2TYP2E1Tge0CS6Z7r8LTNzJHQVX1hD3558eP0,519
4
4
  llama_cloud/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
@@ -9,13 +9,13 @@ llama_cloud/core/remove_none_from_dict.py,sha256=8m91FC3YuVem0Gm9_sXhJ2tGvP33owJ
9
9
  llama_cloud/environment.py,sha256=feTjOebeFZMrBdnHat4RE5aHlpt-sJm4NhK4ntV1htI,167
10
10
  llama_cloud/errors/__init__.py,sha256=pbbVUFtB9LCocA1RMWMMF_RKjsy5YkOKX5BAuE49w6g,170
11
11
  llama_cloud/errors/unprocessable_entity_error.py,sha256=FvR7XPlV3Xx5nu8HNlmLhBRdk4so_gCHjYT5PyZe6sM,313
12
- llama_cloud/resources/__init__.py,sha256=YEJrxAIFcQ6-d8qKlUYidwJqWFVWLKUw4B3gQrn1nKI,4429
12
+ llama_cloud/resources/__init__.py,sha256=PYMjBpAdnSZdd_tF9vTFuPbD4fyUDnhALjawaJ393H0,4297
13
13
  llama_cloud/resources/admin/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
14
- llama_cloud/resources/admin/client.py,sha256=2eXQ7OVBgxT2c_EX3mFxKyoAwuuZ6aDSkLJ7Yf8AOKw,5956
14
+ llama_cloud/resources/admin/client.py,sha256=YIYy9kU1_xaE0gkpmZZbCgLzZj6XSrAUplS7S2uWmwM,8536
15
15
  llama_cloud/resources/agent_deployments/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
16
16
  llama_cloud/resources/agent_deployments/client.py,sha256=3EOzOjmRs4KISgJ566enq3FCuN3YtskjO0OHqQGtkQ0,6122
17
17
  llama_cloud/resources/beta/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
18
- llama_cloud/resources/beta/client.py,sha256=KMveY6Uj_lurX9DcY198GoOW7rhww_emrvHFHHD4W7o,46846
18
+ llama_cloud/resources/beta/client.py,sha256=_GNkHQxyZxhZOkLIRzfCw6PexQx-E8r_7R-3Wd9Y0uE,63128
19
19
  llama_cloud/resources/chat_apps/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
20
20
  llama_cloud/resources/chat_apps/client.py,sha256=orSI8rpQbUwVEToolEeiEi5Qe--suXFvfu6D9JDii5I,23595
21
21
  llama_cloud/resources/classifier/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
@@ -35,12 +35,10 @@ llama_cloud/resources/embedding_model_configs/types/__init__.py,sha256=6-rcDwJhw
35
35
  llama_cloud/resources/embedding_model_configs/types/embedding_model_config_create_embedding_config.py,sha256=SQCHJk0AmBbKS5XKdcEJxhDhIMLQCmCI13IHC28v7vQ,3054
36
36
  llama_cloud/resources/evals/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
37
37
  llama_cloud/resources/evals/client.py,sha256=v2AyeQV0hVgC6xoP2gJNgneJMaeXALV1hIeirYGxlPw,3242
38
- llama_cloud/resources/files/__init__.py,sha256=3B0SNM8EE6PddD5LpxYllci9vflEXy1xjPzhEEd-OUk,293
39
- llama_cloud/resources/files/client.py,sha256=41iMAtvSIz019jGFJ5aBVG-Haxho_bUYKBavBdFYV2I,59400
40
- llama_cloud/resources/files/types/__init__.py,sha256=EPYENAwkjBWv1MLf8s7R5-RO-cxZ_8NPrqfR4ZoR7jY,418
38
+ llama_cloud/resources/files/__init__.py,sha256=Ws53l-S3kyAGFinYPOb9WpN84DtbFn6gLYZtI2akBLQ,169
39
+ llama_cloud/resources/files/client.py,sha256=Crd0IR0cV5fld4jUGAHE8VsIbw7vCYrOIyBTSwDyitA,56242
40
+ llama_cloud/resources/files/types/__init__.py,sha256=ZZuDQsYsxmQ9VwpfN7oqftzGRnFTR2EMYdCa7zARo4g,204
41
41
  llama_cloud/resources/files/types/file_create_from_url_resource_info_value.py,sha256=Wc8wFgujOO5pZvbbh2TMMzpa37GKZd14GYNJ9bdq7BE,214
42
- llama_cloud/resources/files/types/file_create_permission_info_value.py,sha256=KPCFuEaa8NiB85A5MfdXRAQ0poAUTl7Feg6BTfmdWas,209
43
- llama_cloud/resources/files/types/file_create_resource_info_value.py,sha256=R7Y-CJf7fnbvIqE3xOI5XOrmPwLbVJLC7zpxMu8Zopk,201
44
42
  llama_cloud/resources/jobs/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
45
43
  llama_cloud/resources/jobs/client.py,sha256=gv_N8e0lay7cjt6MCwx-Cj4FiCXKhbyCDaWbadaJpgY,6270
46
44
  llama_cloud/resources/llama_extract/__init__.py,sha256=V6VZ8hQXwAuvOOZyk43nnbINoDQqEr03AjKQPhYKluk,997
@@ -77,7 +75,7 @@ llama_cloud/resources/retrievers/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-
77
75
  llama_cloud/resources/retrievers/client.py,sha256=z2LhmA-cZVFzr9P6loeCZYnJbvSIk0QitFeVFp-IyZk,32126
78
76
  llama_cloud/resources/users/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
79
77
  llama_cloud/resources/users/client.py,sha256=A2s8e2syQHkkSwPz-Lrt_Zxp1K-8nqJqj5EafE6NWYs,5545
80
- llama_cloud/types/__init__.py,sha256=zPg9ZvyXe6PcaPTS519YoNDtKLjfGdUeoXH0Hr_wGMc,33040
78
+ llama_cloud/types/__init__.py,sha256=l9f6mCL1dMOR2kl7HUdKLuHOUOXHHo6sxi2-8ZP8Tx8,33685
81
79
  llama_cloud/types/advanced_mode_transform_config.py,sha256=4xCXye0_cPmVS1F8aNTx81sIaEPjQH9kiCCAIoqUzlI,1502
82
80
  llama_cloud/types/advanced_mode_transform_config_chunking_config.py,sha256=wYbJnWLpeQDfhmDZz-wJfYzD1iGT5Jcxb9ga3mzUuvk,1983
83
81
  llama_cloud/types/advanced_mode_transform_config_segmentation_config.py,sha256=anNGq0F5-IlbIW3kpC8OilzLJnUq5tdIcWHnRnmlYsg,1303
@@ -196,10 +194,15 @@ llama_cloud/types/failure_handling_config.py,sha256=EmAQW0qm7-JTSYFwhmIWxqkVNWym
196
194
  llama_cloud/types/file.py,sha256=rQXitPRKOYw91nK5qOZ0vpOmIx_MCpRb0g78d9dQs6w,1822
197
195
  llama_cloud/types/file_classification.py,sha256=jKzAc_3rg0Usyf3TNr-bI5HZn9zGIj9vYH90RKoDtiY,1418
198
196
  llama_cloud/types/file_count_by_status_response.py,sha256=WuorbZvKjDs9Ql1hUiQu4gN5iCm8d6fr92KLyHpRvQU,1356
197
+ llama_cloud/types/file_create.py,sha256=eLUC50CzXOdAR_P2mBtX_R7kGteIVbP1V3LzuP1s0Xs,1629
198
+ llama_cloud/types/file_create_permission_info_value.py,sha256=KPCFuEaa8NiB85A5MfdXRAQ0poAUTl7Feg6BTfmdWas,209
199
+ llama_cloud/types/file_create_resource_info_value.py,sha256=R7Y-CJf7fnbvIqE3xOI5XOrmPwLbVJLC7zpxMu8Zopk,201
199
200
  llama_cloud/types/file_data.py,sha256=dH2SNK9ZM-ZH7uKFIfBsk8bVixM33rUr40BdZWFXLhU,1225
201
+ llama_cloud/types/file_filter.py,sha256=VMP_NxXhhyUKInwPTcGPXYO2r5Q17ilds_tXgy6jteo,1257
200
202
  llama_cloud/types/file_id_presigned_url.py,sha256=Yr_MGFKbuBEHK4efRSK53fHcoo5bbAKnqQGGhMycUc0,1398
201
203
  llama_cloud/types/file_parse_public.py,sha256=sshZ0BcjHMGpuz4ylSurv0K_3ejfPrUGGyDxBHCtdMg,1378
202
204
  llama_cloud/types/file_permission_info_value.py,sha256=RyQlNbhvIKS87Ywu7XUaw5jDToZX64M9Wqzu1U_q2Us,197
205
+ llama_cloud/types/file_query_response.py,sha256=e92h6xJoqGPM9VSDy7wnrkQpsaxrVH8YVHzRIgTTl-g,1199
203
206
  llama_cloud/types/file_resource_info_value.py,sha256=g6T6ELeLK9jgcvX6r-EuAl_4JkwnyqdS0RRoabMReSU,195
204
207
  llama_cloud/types/file_store_info_response.py,sha256=YztOvESSDM52urD0gyO47RPWz-kZEjIpEYSeZYfkCLk,1195
205
208
  llama_cloud/types/file_store_info_response_status.py,sha256=UiPdZDEACVuiZ6zqkLnAYJVIxa-TIVwGN6_xF9lt9Xc,778
@@ -230,6 +233,8 @@ llama_cloud/types/job_record_with_usage_metrics.py,sha256=iNV2do5TB_0e3PoOz_DJyA
230
233
  llama_cloud/types/l_lama_parse_transform_config.py,sha256=YQRJZvKh1Ee2FUyW_N0nqYJoW599qBgH3JCH9SH6YLo,1249
231
234
  llama_cloud/types/legacy_parse_job_config.py,sha256=eEPExbkUi9J7lQoY0Fuc2HK_RlhPmO30cMkfjtmmizs,12832
232
235
  llama_cloud/types/license_info_response.py,sha256=fE9vcWO8k92SBqb_wOyBu_16C61s72utA-SifEi9iBc,1192
236
+ llama_cloud/types/llama_extract_mode_availability.py,sha256=7XelUrLe9wteCeEnP_shnb485lwKo56A2EZ66bq9HQw,1257
237
+ llama_cloud/types/llama_extract_mode_availability_status.py,sha256=_ildgVCsBdqOLD__qdEjcYxqgKunXhJ_VHUeqjZJX8c,566
233
238
  llama_cloud/types/llama_extract_settings.py,sha256=YKhhyUNgqpowTdTx715Uk13GdBsxCUZLVsLi5iYQIiY,2767
234
239
  llama_cloud/types/llama_index_core_base_llms_types_chat_message.py,sha256=NelHo-T-ebVMhRKsqE_xV8AJW4c7o6lS0uEQnPsmTwg,1365
235
240
  llama_cloud/types/llama_index_core_base_llms_types_chat_message_blocks_item.py,sha256=-aL8fh-w2Xf4uQs_LHzb3q6LL_onLAcVzCR5yMI4qJw,1571
@@ -389,7 +394,7 @@ llama_cloud/types/vertex_embedding_mode.py,sha256=yY23FjuWU_DkXjBb3JoKV4SCMqel2B
389
394
  llama_cloud/types/vertex_text_embedding.py,sha256=-C4fNCYfFl36ATdBMGFVPpiHIKxjk0KB1ERA2Ec20aU,1932
390
395
  llama_cloud/types/webhook_configuration.py,sha256=_Xm15whrWoKNBuCoO5y_NunA-ByhCAYK87LnC4W-Pzg,1350
391
396
  llama_cloud/types/webhook_configuration_webhook_events_item.py,sha256=OL3moFO_6hsKZYSBQBsSHmWA0NgLcLJgBPZfABwT60c,2544
392
- llama_cloud-0.1.36.dist-info/LICENSE,sha256=_iNqtPcw1Ue7dZKwOwgPtbegMUkWVy15hC7bffAdNmY,1067
393
- llama_cloud-0.1.36.dist-info/METADATA,sha256=j9LO4ZreG5bdpYgvrpUvEB7wjKJQVNT0106_7aqGGU0,1092
394
- llama_cloud-0.1.36.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88
395
- llama_cloud-0.1.36.dist-info/RECORD,,
397
+ llama_cloud-0.1.37.dist-info/LICENSE,sha256=_iNqtPcw1Ue7dZKwOwgPtbegMUkWVy15hC7bffAdNmY,1067
398
+ llama_cloud-0.1.37.dist-info/METADATA,sha256=3ZhEF_4jLcePL5FpH8KPjt1q2VOMivGivd20sul-Hps,1194
399
+ llama_cloud-0.1.37.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
400
+ llama_cloud-0.1.37.dist-info/RECORD,,
@@ -1,4 +1,4 @@
1
1
  Wheel-Version: 1.0
2
- Generator: poetry-core 1.7.0
2
+ Generator: poetry-core 2.1.3
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any