llama-cloud 0.0.8__py3-none-any.whl → 0.0.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of llama-cloud might be problematic.
- llama_cloud/__init__.py +22 -0
- llama_cloud/client.py +3 -0
- llama_cloud/resources/__init__.py +13 -1
- llama_cloud/resources/data_sinks/client.py +40 -8
- llama_cloud/resources/data_sources/client.py +48 -12
- llama_cloud/resources/data_sources/types/data_source_update_component_one.py +4 -0
- llama_cloud/resources/extraction/client.py +55 -38
- llama_cloud/resources/organizations/__init__.py +2 -0
- llama_cloud/resources/organizations/client.py +867 -0
- llama_cloud/resources/parsing/client.py +104 -0
- llama_cloud/resources/pipelines/client.py +358 -24
- llama_cloud/resources/projects/client.py +28 -8
- llama_cloud/types/__init__.py +20 -0
- llama_cloud/types/chat_data.py +38 -0
- llama_cloud/types/cloud_azure_ai_search_vector_store.py +1 -1
- llama_cloud/types/cloud_confluence_data_source.py +45 -0
- llama_cloud/types/cloud_jira_data_source.py +43 -0
- llama_cloud/types/cloud_sharepoint_data_source.py +1 -0
- llama_cloud/types/configurable_data_source_names.py +8 -0
- llama_cloud/types/data_source_component_one.py +4 -0
- llama_cloud/types/data_source_create_component_one.py +4 -0
- llama_cloud/types/eval_dataset_job_record.py +1 -0
- llama_cloud/types/extraction_job.py +35 -0
- llama_cloud/types/extraction_schema.py +1 -2
- llama_cloud/types/llama_parse_parameters.py +5 -0
- llama_cloud/types/organization.py +38 -0
- llama_cloud/types/organization_create.py +35 -0
- llama_cloud/types/pipeline.py +0 -3
- llama_cloud/types/pipeline_create.py +0 -3
- llama_cloud/types/pipeline_data_source_component_one.py +4 -0
- llama_cloud/types/preset_retrieval_params.py +5 -0
- llama_cloud/types/project.py +1 -1
- llama_cloud/types/retrieval_mode.py +29 -0
- llama_cloud/types/user_organization.py +49 -0
- llama_cloud/types/user_organization_create.py +36 -0
- llama_cloud/types/user_organization_delete.py +36 -0
- {llama_cloud-0.0.8.dist-info → llama_cloud-0.0.10.dist-info}/METADATA +2 -1
- {llama_cloud-0.0.8.dist-info → llama_cloud-0.0.10.dist-info}/RECORD +40 -28
- {llama_cloud-0.0.8.dist-info → llama_cloud-0.0.10.dist-info}/WHEEL +1 -1
- {llama_cloud-0.0.8.dist-info → llama_cloud-0.0.10.dist-info}/LICENSE +0 -0
@@ -113,6 +113,11 @@ class ParsingClient:
         page_separator: str,
         bounding_box: str,
         target_pages: str,
+        use_vendor_multimodal_model: bool,
+        vendor_multimodal_model_name: str,
+        vendor_multimodal_api_key: str,
+        page_prefix: str,
+        page_suffix: str,
         file: typing.IO,
     ) -> ParsingJob:
         """
@@ -143,6 +148,16 @@ class ParsingClient:
 
             - target_pages: str.
 
+            - use_vendor_multimodal_model: bool.
+
+            - vendor_multimodal_model_name: str.
+
+            - vendor_multimodal_api_key: str.
+
+            - page_prefix: str.
+
+            - page_suffix: str.
+
             - file: typing.IO.
         """
         _response = self._client_wrapper.httpx_client.request(
@@ -162,6 +177,11 @@ class ParsingClient:
                     "page_separator": page_separator,
                     "bounding_box": bounding_box,
                     "target_pages": target_pages,
+                    "use_vendor_multimodal_model": use_vendor_multimodal_model,
+                    "vendor_multimodal_model_name": vendor_multimodal_model_name,
+                    "vendor_multimodal_api_key": vendor_multimodal_api_key,
+                    "page_prefix": page_prefix,
+                    "page_suffix": page_suffix,
                 }
             ),
             files={"file": file},
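For orientation, a minimal sketch of how the new multimodal options might be passed from user code. Only the parameter names are taken from the hunks above; the method name `upload_file`, the model name, and the prefix/suffix strings are assumptions used to illustrate the call shape, not confirmed by this diff.

    # Hypothetical usage sketch, not taken from the package itself.
    from llama_cloud.client import LlamaCloud

    client = LlamaCloud(token="YOUR_TOKEN")

    with open("report.pdf", "rb") as f:
        job = client.parsing.upload_file(        # method name assumed
            file=f,
            use_vendor_multimodal_model=True,
            vendor_multimodal_model_name="MODEL_NAME",   # placeholder value
            vendor_multimodal_api_key="VENDOR_API_KEY",  # placeholder value
            page_prefix="--- page start ---",            # placeholder value
            page_suffix="--- page end ---",              # placeholder value
            # pre-existing parameters (page_separator, bounding_box,
            # target_pages, ...) are unchanged and omitted here
        )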
@@ -238,6 +258,38 @@ class ParsingClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
+    def get_parsing_job_details(self, job_id: str) -> typing.Any:
+        """
+        Get a job by id
+
+        Parameters:
+            - job_id: str.
+        ---
+        from llama_cloud.client import LlamaCloud
+
+        client = LlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        client.parsing.get_parsing_job_details(
+            job_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/parsing/job/{job_id}/details"),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.Any, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
     def get_job_text_result(self, job_id: str) -> ParsingJobTextResult:
         """
         Get a job by id
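The new endpoint can be called exactly as the added docstring shows; a slightly expanded sketch of that example follows. The return type is `typing.Any`, so the result is the raw JSON payload of the `/details` route; the job id below is a placeholder.

    from llama_cloud.client import LlamaCloud

    client = LlamaCloud(token="YOUR_TOKEN")

    details = client.parsing.get_parsing_job_details(
        job_id="00000000-0000-0000-0000-000000000000",  # placeholder job id
    )
    print(details)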
@@ -586,6 +638,11 @@ class AsyncParsingClient:
         page_separator: str,
         bounding_box: str,
         target_pages: str,
+        use_vendor_multimodal_model: bool,
+        vendor_multimodal_model_name: str,
+        vendor_multimodal_api_key: str,
+        page_prefix: str,
+        page_suffix: str,
         file: typing.IO,
     ) -> ParsingJob:
         """
@@ -616,6 +673,16 @@ class AsyncParsingClient:
 
             - target_pages: str.
 
+            - use_vendor_multimodal_model: bool.
+
+            - vendor_multimodal_model_name: str.
+
+            - vendor_multimodal_api_key: str.
+
+            - page_prefix: str.
+
+            - page_suffix: str.
+
             - file: typing.IO.
         """
         _response = await self._client_wrapper.httpx_client.request(
@@ -635,6 +702,11 @@ class AsyncParsingClient:
                     "page_separator": page_separator,
                     "bounding_box": bounding_box,
                     "target_pages": target_pages,
+                    "use_vendor_multimodal_model": use_vendor_multimodal_model,
+                    "vendor_multimodal_model_name": vendor_multimodal_model_name,
+                    "vendor_multimodal_api_key": vendor_multimodal_api_key,
+                    "page_prefix": page_prefix,
+                    "page_suffix": page_suffix,
                 }
             ),
             files={"file": file},
@@ -711,6 +783,38 @@ class AsyncParsingClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
+    async def get_parsing_job_details(self, job_id: str) -> typing.Any:
+        """
+        Get a job by id
+
+        Parameters:
+            - job_id: str.
+        ---
+        from llama_cloud.client import AsyncLlamaCloud
+
+        client = AsyncLlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        await client.parsing.get_parsing_job_details(
+            job_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/parsing/job/{job_id}/details"),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.Any, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
     async def get_job_text_result(self, job_id: str) -> ParsingJobTextResult:
         """
         Get a job by id
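The async counterpart follows the docstring added in the hunk above; a short usage sketch is below. `asyncio` only drives the example here and is not part of the SDK; the job id is again a placeholder.

    import asyncio

    from llama_cloud.client import AsyncLlamaCloud


    async def main() -> None:
        client = AsyncLlamaCloud(token="YOUR_TOKEN")
        details = await client.parsing.get_parsing_job_details(
            job_id="00000000-0000-0000-0000-000000000000",  # placeholder job id
        )
        print(details)


    asyncio.run(main())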