llama-cloud 0.1.39__py3-none-any.whl → 0.1.41__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llama_cloud/__init__.py +24 -72
- llama_cloud/client.py +2 -5
- llama_cloud/resources/__init__.py +0 -4
- llama_cloud/resources/admin/client.py +5 -5
- llama_cloud/resources/beta/client.py +1351 -335
- llama_cloud/resources/llama_extract/client.py +56 -0
- llama_cloud/resources/parsing/client.py +8 -0
- llama_cloud/resources/pipelines/client.py +37 -0
- llama_cloud/types/__init__.py +24 -72
- llama_cloud/types/{src_app_schema_chat_chat_message.py → chat_message.py} +1 -1
- llama_cloud/types/extract_config.py +8 -2
- llama_cloud/types/extract_job_create.py +2 -0
- llama_cloud/types/extract_job_create_priority.py +29 -0
- llama_cloud/types/extract_models.py +28 -28
- llama_cloud/types/job_names.py +0 -4
- llama_cloud/types/{document_block.py → llama_extract_feature_availability.py} +5 -6
- llama_cloud/types/llama_extract_mode_availability.py +4 -3
- llama_cloud/types/llama_extract_settings.py +1 -1
- llama_cloud/types/llama_parse_parameters.py +1 -0
- llama_cloud/types/{progress_event.py → parse_configuration.py} +12 -12
- llama_cloud/types/{llama_index_core_base_llms_types_chat_message.py → parse_configuration_create.py} +9 -7
- llama_cloud/types/{edit_suggestion.py → parse_configuration_filter.py} +8 -6
- llama_cloud/types/{report_update_event.py → parse_configuration_query_response.py} +6 -6
- llama_cloud/types/parse_job_config.py +1 -0
- llama_cloud/types/pipeline.py +4 -0
- llama_cloud/types/pipeline_create.py +2 -0
- llama_cloud/types/playground_session.py +2 -2
- llama_cloud/types/public_model_name.py +97 -0
- llama_cloud/types/{report_create_response.py → schema_generation_availability.py} +4 -2
- llama_cloud/types/schema_generation_availability_status.py +17 -0
- llama_cloud/types/{report_event_item.py → sparse_model_config.py} +10 -8
- llama_cloud/types/sparse_model_type.py +33 -0
- llama_cloud/types/webhook_configuration.py +1 -0
- llama_cloud-0.1.41.dist-info/METADATA +106 -0
- {llama_cloud-0.1.39.dist-info → llama_cloud-0.1.41.dist-info}/RECORD +37 -56
- {llama_cloud-0.1.39.dist-info → llama_cloud-0.1.41.dist-info}/WHEEL +1 -1
- llama_cloud/resources/reports/__init__.py +0 -5
- llama_cloud/resources/reports/client.py +0 -1230
- llama_cloud/resources/reports/types/__init__.py +0 -7
- llama_cloud/resources/reports/types/update_report_plan_api_v_1_reports_report_id_plan_patch_request_action.py +0 -25
- llama_cloud/types/audio_block.py +0 -34
- llama_cloud/types/edit_suggestion_blocks_item.py +0 -8
- llama_cloud/types/image_block.py +0 -35
- llama_cloud/types/llama_index_core_base_llms_types_chat_message_blocks_item.py +0 -56
- llama_cloud/types/paginated_report_response.py +0 -35
- llama_cloud/types/progress_event_status.py +0 -33
- llama_cloud/types/report.py +0 -33
- llama_cloud/types/report_block.py +0 -35
- llama_cloud/types/report_block_dependency.py +0 -29
- llama_cloud/types/report_event_item_event_data.py +0 -45
- llama_cloud/types/report_event_type.py +0 -37
- llama_cloud/types/report_metadata.py +0 -43
- llama_cloud/types/report_plan.py +0 -36
- llama_cloud/types/report_plan_block.py +0 -36
- llama_cloud/types/report_query.py +0 -33
- llama_cloud/types/report_response.py +0 -41
- llama_cloud/types/report_state.py +0 -37
- llama_cloud/types/report_state_event.py +0 -38
- llama_cloud/types/text_block.py +0 -31
- llama_cloud-0.1.39.dist-info/METADATA +0 -32
- {llama_cloud-0.1.39.dist-info → llama_cloud-0.1.41.dist-info}/LICENSE +0 -0
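
Because the entire `reports` resource and its supporting types are deleted in 0.1.41 (see the `llama_cloud/resources/reports/*` and `report_*` entries above), code written against 0.1.39 can fail on import or attribute access after upgrading. The sketch below is a hypothetical pre-flight check, not part of the package's documented API: it assumes the client is still exported as `LlamaCloud` from `llama_cloud.client` with a `token` argument, as in earlier releases, and only probes for the removed attribute rather than calling any specific report method.

```python
# Hypothetical upgrade check for llama-cloud 0.1.39 -> 0.1.41 (not from the package docs).
# Assumes the Fern-generated client is still exported as LlamaCloud and accepts `token`.
from importlib.metadata import version

from llama_cloud.client import LlamaCloud

print("installed llama-cloud:", version("llama-cloud"))

client = LlamaCloud(token="llx-...")  # placeholder credential, illustrative only

if hasattr(client, "reports"):
    print("reports resource present (0.1.39 behaviour)")
else:
    print("reports resource removed in 0.1.41; migrate report-generation calls")
```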
llama_cloud/types/report_state_event.py DELETED

@@ -1,38 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-from ..core.datetime_utils import serialize_datetime
-from .report_state import ReportState
-
-try:
-    import pydantic
-    if pydantic.__version__.startswith("1."):
-        raise ImportError
-    import pydantic.v1 as pydantic  # type: ignore
-except ImportError:
-    import pydantic  # type: ignore
-
-
-class ReportStateEvent(pydantic.BaseModel):
-    """
-    Event for notifying when an report's state changes.
-    """
-
-    timestamp: typing.Optional[dt.datetime]
-    msg: str = pydantic.Field(description="The message to display to the user")
-    status: ReportState = pydantic.Field(description="The new state of the report")
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
-
-    class Config:
-        frozen = True
-        smart_union = True
-        json_encoders = {dt.datetime: serialize_datetime}
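
The deleted module above also shows the import shim every Fern-generated model in this package uses to work with both pydantic major versions: under pydantic 1.x the version check raises `ImportError`, so the plain `import pydantic` in the `except` branch is used, while under pydantic 2.x the bundled `pydantic.v1` compatibility layer is imported instead. A minimal, self-contained sketch of that pattern follows; the `ExampleEvent` class and its fields are illustrative, not part of the package.

```python
# Sketch of the pydantic v1/v2 import shim used by the generated models above.
# ExampleEvent is illustrative; only the shim and serialization defaults mirror the diff.
import datetime as dt
import typing

try:
    import pydantic

    if pydantic.__version__.startswith("1."):
        raise ImportError  # pydantic 1.x installed: fall back to the plain import below
    import pydantic.v1 as pydantic  # pydantic 2.x installed: use its bundled v1 API  # type: ignore
except ImportError:
    import pydantic  # type: ignore


class ExampleEvent(pydantic.BaseModel):
    timestamp: typing.Optional[dt.datetime]
    msg: str = pydantic.Field(description="The message to display to the user")

    def json(self, **kwargs: typing.Any) -> str:
        # Same serialization defaults as the generated models: aliases on, unset fields skipped.
        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
        return super().json(**kwargs_with_defaults)

    class Config:
        frozen = True
        smart_union = True


print(ExampleEvent(msg="report started").json())  # timestamp is unset, so it is omitted
```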
llama_cloud/types/text_block.py DELETED

@@ -1,31 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-from ..core.datetime_utils import serialize_datetime
-
-try:
-    import pydantic
-    if pydantic.__version__.startswith("1."):
-        raise ImportError
-    import pydantic.v1 as pydantic  # type: ignore
-except ImportError:
-    import pydantic  # type: ignore
-
-
-class TextBlock(pydantic.BaseModel):
-    text: str
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
-
-    class Config:
-        frozen = True
-        smart_union = True
-        json_encoders = {dt.datetime: serialize_datetime}
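
The removed `TextBlock` follows the same conventions: `json()` and `dict()` force `by_alias=True` and `exclude_unset=True`, and `Config.frozen = True` makes instances immutable (and hashable). For reference, usage under 0.1.39 looked roughly like the sketch below; the import no longer resolves in 0.1.41.

```python
# Illustrative only: TextBlock is deleted in 0.1.41; this mirrors the 0.1.39 definition above.
from llama_cloud.types.text_block import TextBlock  # resolves only on llama-cloud <= 0.1.39

block = TextBlock(text="hello")
print(block.json())        # '{"text": "hello"}' -- by_alias/exclude_unset defaults applied
# block.text = "changed"   # would raise: frozen = True disallows mutation
```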
llama_cloud-0.1.39.dist-info/METADATA DELETED

@@ -1,32 +0,0 @@
-Metadata-Version: 2.1
-Name: llama-cloud
-Version: 0.1.39
-Summary:
-License: MIT
-Author: Logan Markewich
-Author-email: logan@runllama.ai
-Requires-Python: >=3.8,<4
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Requires-Dist: certifi (>=2024.7.4)
-Requires-Dist: httpx (>=0.20.0)
-Requires-Dist: pydantic (>=1.10)
-Description-Content-Type: text/markdown
-
-# LlamaIndex Python Client
-
-This client is auto-generated using [Fern](https://buildwithfern.com/docs/intro)
-
-To publish:
-- update the version in `pyproject.toml`
-- run `poetry publish --build`
-
-Setup credentials:
-- run `poetry config pypi-token.pypi <my-token>`
-- Get token form PyPi once logged in with credentials in [1Password](https://start.1password.com/open/i?a=32SA66TZ3JCRXOCMASLSDCT5TI&v=lhv7hvb5o46cwo257c3hviqkle&i=yvslwei7jtf6tgqamzcdantqi4&h=llamaindex.1password.com)
-
{llama_cloud-0.1.39.dist-info → llama_cloud-0.1.41.dist-info}/LICENSE
File without changes
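
After upgrading, the packaging-level changes above (new METADATA, updated WHEEL and RECORD, unchanged LICENSE) can be confirmed from the installed distribution with the standard library alone; a small sketch, with the expected values taken from the 0.1.39 METADATA shown above:

```python
# Read the installed llama-cloud wheel's metadata back (stdlib only).
from importlib.metadata import metadata, version

print(version("llama-cloud"))            # "0.1.41" once the new wheel is installed
meta = metadata("llama-cloud")
print(meta["Requires-Python"])           # was ">=3.8,<4" in the deleted 0.1.39 METADATA
for dep in meta.get_all("Requires-Dist") or []:
    print(dep)                           # certifi / httpx / pydantic pins
```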