llama-cloud 0.0.11__py3-none-any.whl → 0.0.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Note: this release of llama-cloud has been flagged as potentially problematic.
- llama_cloud/__init__.py +72 -10
- llama_cloud/client.py +3 -0
- llama_cloud/resources/__init__.py +11 -1
- llama_cloud/resources/auth/__init__.py +2 -0
- llama_cloud/resources/auth/client.py +124 -0
- llama_cloud/resources/data_sinks/types/data_sink_update_component_one.py +4 -0
- llama_cloud/resources/pipelines/__init__.py +12 -2
- llama_cloud/resources/pipelines/client.py +9 -47
- llama_cloud/resources/pipelines/types/__init__.py +11 -1
- llama_cloud/resources/pipelines/types/pipeline_update_transform_config.py +31 -0
- llama_cloud/types/__init__.py +70 -10
- llama_cloud/types/advanced_mode_transform_config.py +38 -0
- llama_cloud/types/advanced_mode_transform_config_chunking_config.py +67 -0
- llama_cloud/types/advanced_mode_transform_config_segmentation_config.py +45 -0
- llama_cloud/types/character_chunking_config.py +32 -0
- llama_cloud/types/{html_node_parser.py → character_splitter.py} +9 -9
- llama_cloud/types/chat_data.py +2 -0
- llama_cloud/types/cloud_az_storage_blob_data_source.py +11 -2
- llama_cloud/types/{transform_config.py → cloud_milvus_vector_store.py} +11 -7
- llama_cloud/types/cloud_mongo_db_atlas_vector_search.py +51 -0
- llama_cloud/types/configurable_data_sink_names.py +8 -0
- llama_cloud/types/configurable_transformation_names.py +8 -12
- llama_cloud/types/configured_transformation_item_component_one.py +4 -6
- llama_cloud/types/custom_claims.py +61 -0
- llama_cloud/types/data_sink_component_one.py +4 -0
- llama_cloud/types/data_sink_create_component_one.py +4 -0
- llama_cloud/types/element_segmentation_config.py +29 -0
- llama_cloud/types/embedding_config_component.py +2 -14
- llama_cloud/types/embedding_config_component_one.py +19 -0
- llama_cloud/types/eval_dataset_job_record.py +1 -0
- llama_cloud/types/ingestion_error_response.py +34 -0
- llama_cloud/types/job_name_mapping.py +45 -0
- llama_cloud/types/llama_parse_supported_file_extensions.py +32 -0
- llama_cloud/types/{simple_file_node_parser.py → llm_parameters.py} +6 -14
- llama_cloud/types/managed_ingestion_status_response.py +6 -0
- llama_cloud/types/none_chunking_config.py +29 -0
- llama_cloud/types/none_segmentation_config.py +29 -0
- llama_cloud/types/page_segmentation_config.py +29 -0
- llama_cloud/types/{json_node_parser.py → page_splitter_node_parser.py} +3 -8
- llama_cloud/types/parsing_job.py +2 -0
- llama_cloud/types/parsing_usage.py +1 -1
- llama_cloud/types/pipeline_create.py +2 -2
- llama_cloud/types/pipeline_create_transform_config.py +31 -0
- llama_cloud/types/semantic_chunking_config.py +32 -0
- llama_cloud/types/sentence_chunking_config.py +34 -0
- llama_cloud/types/supported_eval_llm_model_names.py +8 -0
- llama_cloud/types/token_chunking_config.py +33 -0
- llama_cloud/types/user.py +35 -0
- {llama_cloud-0.0.11.dist-info → llama_cloud-0.0.13.dist-info}/METADATA +1 -1
- {llama_cloud-0.0.11.dist-info → llama_cloud-0.0.13.dist-info}/RECORD +52 -32
- llama_cloud/types/transform_config_mode.py +0 -21
- {llama_cloud-0.0.11.dist-info → llama_cloud-0.0.13.dist-info}/LICENSE +0 -0
- {llama_cloud-0.0.11.dist-info → llama_cloud-0.0.13.dist-info}/WHEEL +0 -0

llama_cloud/types/none_chunking_config.py ADDED
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class NoneChunkingConfig(pydantic.BaseModel):
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
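
All of the models added in this release share the same Fern-generated shape: a pydantic v1-style BaseModel (routed through pydantic.v1 when pydantic 2.x is installed), json()/dict() overrides that default to by_alias=True and exclude_unset=True, and a frozen Config. A minimal sketch of what that means for callers, using the class above (the import path follows the file layout in this diff):

```python
from llama_cloud.types.none_chunking_config import NoneChunkingConfig

cfg = NoneChunkingConfig()

# exclude_unset=True drops fields that were never set, and frozen=True
# makes instances immutable (and therefore hashable).
print(cfg.dict())  # {}
print(cfg.json())  # {}
```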

llama_cloud/types/none_segmentation_config.py ADDED
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class NoneSegmentationConfig(pydantic.BaseModel):
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

llama_cloud/types/page_segmentation_config.py ADDED
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class PageSegmentationConfig(pydantic.BaseModel):
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

llama_cloud/types/{json_node_parser.py → page_splitter_node_parser.py} RENAMED
@@ -14,15 +14,9 @@ except ImportError:
     import pydantic  # type: ignore
 
 
-class JsonNodeParser(pydantic.BaseModel):
+class PageSplitterNodeParser(pydantic.BaseModel):
     """
-
-
-    Splits a document into Nodes using custom JSON splitting logic.
-
-    Args:
-        include_metadata (bool): whether to include metadata in nodes
-        include_prev_next_rel (bool): whether to include prev/next relationships
+    Split text into pages.
     """
 
     include_metadata: typing.Optional[bool] = pydantic.Field(
@@ -30,6 +24,7 @@ class JsonNodeParser(pydantic.BaseModel):
     )
     include_prev_next_rel: typing.Optional[bool] = pydantic.Field(description="Include prev/next node relationships.")
     callback_manager: typing.Optional[typing.Dict[str, typing.Any]]
+    page_separator: typing.Optional[str] = pydantic.Field(description="Separator to split text into pages.")
     class_name: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:
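
The renamed parser keeps the JsonNodeParser field set and adds page_separator. A hedged construction sketch (values are illustrative; all fields shown in the hunk are typed Optional):

```python
from llama_cloud.types.page_splitter_node_parser import PageSplitterNodeParser

# Split on an explicit page delimiter; the separator value here is only an example.
parser = PageSplitterNodeParser(
    include_metadata=True,
    include_prev_next_rel=True,
    page_separator="\n---\n",
)
print(parser.dict())
```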

llama_cloud/types/parsing_job.py CHANGED
@@ -18,6 +18,8 @@ except ImportError:
 class ParsingJob(pydantic.BaseModel):
     id: str
     status: StatusEnum
+    error_code: typing.Optional[str]
+    error_message: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
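
ParsingJob now carries optional error_code and error_message fields, so a failed job can explain itself without a follow-up request. A small sketch of consuming them (the formatting is illustrative, not part of the SDK):

```python
from llama_cloud.types.parsing_job import ParsingJob

def describe(job: ParsingJob) -> str:
    # Both fields are Optional[str]; expect them to be populated only on failure.
    if job.error_message is not None:
        return f"{job.status}: {job.error_code or 'UNKNOWN'}: {job.error_message}"
    return str(job.status)
```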

llama_cloud/types/parsing_usage.py CHANGED
@@ -16,7 +16,7 @@ except ImportError:
 
 class ParsingUsage(pydantic.BaseModel):
     usage_pdf_pages: int
-    max_pdf_pages: int
+    max_pdf_pages: typing.Optional[int]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
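
With max_pdf_pages loosened from int to Optional[int], callers can no longer assume a numeric cap is always present. A defensive sketch (treating None as "no cap" is an assumption, not taken from this diff):

```python
import typing

from llama_cloud.types.parsing_usage import ParsingUsage

def pages_remaining(usage: ParsingUsage) -> typing.Optional[int]:
    # max_pdf_pages may now be None; guard before doing arithmetic with it.
    if usage.max_pdf_pages is None:
        return None
    return usage.max_pdf_pages - usage.usage_pdf_pages
```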

llama_cloud/types/pipeline_create.py CHANGED
@@ -9,9 +9,9 @@ from .data_sink_create import DataSinkCreate
 from .embedding_config import EmbeddingConfig
 from .eval_execution_params import EvalExecutionParams
 from .llama_parse_parameters import LlamaParseParameters
+from .pipeline_create_transform_config import PipelineCreateTransformConfig
 from .pipeline_type import PipelineType
 from .preset_retrieval_params import PresetRetrievalParams
-from .transform_config import TransformConfig
 
 try:
     import pydantic
@@ -30,7 +30,7 @@ class PipelineCreate(pydantic.BaseModel):
     embedding_config: typing.Optional[EmbeddingConfig] = pydantic.Field(
         description="Configuration for the embedding model."
     )
-    transform_config: typing.Optional[TransformConfig] = pydantic.Field(
+    transform_config: typing.Optional[PipelineCreateTransformConfig] = pydantic.Field(
         description="Configuration for the transformation."
     )
     configured_transformations: typing.Optional[typing.List[ConfiguredTransformationItem]] = pydantic.Field(

llama_cloud/types/pipeline_create_transform_config.py ADDED
@@ -0,0 +1,31 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import typing_extensions
+
+from .advanced_mode_transform_config import AdvancedModeTransformConfig
+from .auto_transform_config import AutoTransformConfig
+
+
+class PipelineCreateTransformConfig_Auto(AutoTransformConfig):
+    mode: typing_extensions.Literal["auto"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineCreateTransformConfig_Advanced(AdvancedModeTransformConfig):
+    mode: typing_extensions.Literal["advanced"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+PipelineCreateTransformConfig = typing.Union[PipelineCreateTransformConfig_Auto, PipelineCreateTransformConfig_Advanced]
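
PipelineCreate.transform_config (see the hunk above) is now typed as this two-member union instead of the removed TransformConfig model, and the lowercase mode literal is what discriminates the variants. A construction sketch, assuming AutoTransformConfig has no required fields of its own:

```python
from llama_cloud.types.pipeline_create_transform_config import (
    PipelineCreateTransformConfig_Auto,
)

# The "mode" literal selects the union member when the payload is serialized.
transform_config = PipelineCreateTransformConfig_Auto(mode="auto")
print(transform_config.dict())  # {"mode": "auto"} when no other fields are set
```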

llama_cloud/types/semantic_chunking_config.py ADDED
@@ -0,0 +1,32 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class SemanticChunkingConfig(pydantic.BaseModel):
+    buffer_size: typing.Optional[int]
+    breakpoint_percentile_threshold: typing.Optional[int]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

llama_cloud/types/sentence_chunking_config.py ADDED
@@ -0,0 +1,34 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class SentenceChunkingConfig(pydantic.BaseModel):
+    chunk_size: typing.Optional[int]
+    chunk_overlap: typing.Optional[int]
+    separator: typing.Optional[str]
+    paragraph_separator: typing.Optional[str]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
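
The new chunking configs (character, token, sentence, semantic, none) each expose a handful of optional tuning knobs; SentenceChunkingConfig above adds chunk sizing plus sentence and paragraph separators. A sketch with illustrative values:

```python
from llama_cloud.types.sentence_chunking_config import SentenceChunkingConfig

chunking = SentenceChunkingConfig(
    chunk_size=1024,       # every field is Optional; these numbers are examples
    chunk_overlap=200,
    paragraph_separator="\n\n",
)
# Config.frozen = True, so a later assignment such as `chunking.chunk_size = 512`
# raises an error instead of mutating the instance.
```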

llama_cloud/types/supported_eval_llm_model_names.py CHANGED
@@ -14,12 +14,16 @@ class SupportedEvalLlmModelNames(str, enum.Enum):
     GPT_3_5_TURBO = "GPT_3_5_TURBO"
     GPT_4 = "GPT_4"
     GPT_4_TURBO = "GPT_4_TURBO"
+    GPT_4_O = "GPT_4O"
+    GPT_4_O_MINI = "GPT_4O_MINI"
 
     def visit(
         self,
         gpt_3_5_turbo: typing.Callable[[], T_Result],
         gpt_4: typing.Callable[[], T_Result],
         gpt_4_turbo: typing.Callable[[], T_Result],
+        gpt_4_o: typing.Callable[[], T_Result],
+        gpt_4_o_mini: typing.Callable[[], T_Result],
     ) -> T_Result:
         if self is SupportedEvalLlmModelNames.GPT_3_5_TURBO:
             return gpt_3_5_turbo()
@@ -27,3 +31,7 @@ class SupportedEvalLlmModelNames(str, enum.Enum):
             return gpt_4()
         if self is SupportedEvalLlmModelNames.GPT_4_TURBO:
             return gpt_4_turbo()
+        if self is SupportedEvalLlmModelNames.GPT_4_O:
+            return gpt_4_o()
+        if self is SupportedEvalLlmModelNames.GPT_4_O_MINI:
+            return gpt_4_o_mini()
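
Because visit() is meant to be exhaustive over the enum, callers that dispatch on the eval model now have to supply handlers for the two GPT-4o members as well. A sketch (the returned labels are illustrative):

```python
from llama_cloud.types.supported_eval_llm_model_names import SupportedEvalLlmModelNames

model = SupportedEvalLlmModelNames.GPT_4_O

label = model.visit(
    gpt_3_5_turbo=lambda: "gpt-3.5-turbo",
    gpt_4=lambda: "gpt-4",
    gpt_4_turbo=lambda: "gpt-4-turbo",
    gpt_4_o=lambda: "gpt-4o",            # new in 0.0.13
    gpt_4_o_mini=lambda: "gpt-4o-mini",  # new in 0.0.13
)
print(label)  # gpt-4o
```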

llama_cloud/types/token_chunking_config.py ADDED
@@ -0,0 +1,33 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class TokenChunkingConfig(pydantic.BaseModel):
+    chunk_size: typing.Optional[int]
+    chunk_overlap: typing.Optional[int]
+    separator: typing.Optional[str]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

llama_cloud/types/user.py ADDED
@@ -0,0 +1,35 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .custom_claims import CustomClaims
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class User(pydantic.BaseModel):
+    id: str
+    email: str
+    name: typing.Optional[str] = pydantic.Field(description="The user's name.")
+    claims: typing.Optional[CustomClaims] = pydantic.Field(description="The user's custom claims.")
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

{llama_cloud-0.0.11.dist-info → llama_cloud-0.0.13.dist-info}/RECORD CHANGED
@@ -1,5 +1,5 @@
-llama_cloud/__init__.py,sha256=
-llama_cloud/client.py,sha256=
+llama_cloud/__init__.py,sha256=nCOIz8gk2isrjwjdbu_xvYjuGsUyAwCRPvWVii6Qeo8,11331
+llama_cloud/client.py,sha256=kITbWAZl-xw19xv9ouSiT1wQ1i7yWHhNG5XDTjb-EVc,4503
 llama_cloud/core/__init__.py,sha256=QJS3CJ2TYP2E1Tge0CS6Z7r8LTNzJHQVX1hD3558eP0,519
 llama_cloud/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
 llama_cloud/core/client_wrapper.py,sha256=xmj0jCdQ0ySzbSqHUWOkpRRy069y74I_HuXkWltcsVM,1507
@@ -9,14 +9,16 @@ llama_cloud/core/remove_none_from_dict.py,sha256=8m91FC3YuVem0Gm9_sXhJ2tGvP33owJ
 llama_cloud/environment.py,sha256=q4q-uY5WgcSlzfHwEANOqFQPu0lstqvMnVOsSfifMKo,168
 llama_cloud/errors/__init__.py,sha256=pbbVUFtB9LCocA1RMWMMF_RKjsy5YkOKX5BAuE49w6g,170
 llama_cloud/errors/unprocessable_entity_error.py,sha256=FvR7XPlV3Xx5nu8HNlmLhBRdk4so_gCHjYT5PyZe6sM,313
-llama_cloud/resources/__init__.py,sha256=
+llama_cloud/resources/__init__.py,sha256=NSJNbk10uOIEVT6mgZts5OxyrvK2KJli-hqALbJ_VvQ,1464
+llama_cloud/resources/auth/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
+llama_cloud/resources/auth/client.py,sha256=kUfPUIXNS95MBKsknEvdqsDojlVJfVnxmHkAaiYVxCY,4560
 llama_cloud/resources/component_definitions/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 llama_cloud/resources/component_definitions/client.py,sha256=YYfoXNa1qim2OdD5y4N5mvoBZKtrCuXS560mtqH_-1c,7569
 llama_cloud/resources/data_sinks/__init__.py,sha256=nsMEyxkVilxvQGSdJi0Z0yKZoTaTWewZIGJNoMwNDsw,205
 llama_cloud/resources/data_sinks/client.py,sha256=upViKGrwZfRM-BZqMjfWEaEeI8YxThVgr4P9SCH065E,20431
 llama_cloud/resources/data_sinks/types/__init__.py,sha256=M9AO57_TUUgjUcGOhxcROql5U7UbJDbEm7aQj3YqU2I,269
 llama_cloud/resources/data_sinks/types/data_sink_update_component.py,sha256=TjBOpvPvUIyi-NT1Gv1vShMoe-jzDKc8UYaFfo7XOO8,249
-llama_cloud/resources/data_sinks/types/data_sink_update_component_one.py,sha256=
+llama_cloud/resources/data_sinks/types/data_sink_update_component_one.py,sha256=4O7ffz4LiffnqVnddb15r7xuYt7G9kUn67NZdWdwizc,982
 llama_cloud/resources/data_sources/__init__.py,sha256=CCs8ur4fvszPjy0GpTWmMjUAx0WykNgKDKFDNbkYLeM,289
 llama_cloud/resources/data_sources/client.py,sha256=uxM67CtKYSexXeKxuHojlbLR7YkqQueRYIrhSLc6Pqs,21915
 llama_cloud/resources/data_sources/types/__init__.py,sha256=iOdDXvAM6w80PR62JCscsTOwzDIXHHcG_Ypv18DEdic,410
@@ -38,27 +40,35 @@ llama_cloud/resources/organizations/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRY
 llama_cloud/resources/organizations/client.py,sha256=akn_3sytJW_VhuLVBbP0TKiKKbBGuuAPDtGVIbW4kdA,34167
 llama_cloud/resources/parsing/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 llama_cloud/resources/parsing/client.py,sha256=fLNVNa945yP76sTf6NdSm5ikifSFFrjxxHXy0vwNBUc,40450
-llama_cloud/resources/pipelines/__init__.py,sha256=
-llama_cloud/resources/pipelines/client.py,sha256=
-llama_cloud/resources/pipelines/types/__init__.py,sha256=
+llama_cloud/resources/pipelines/__init__.py,sha256=qhTSfZXEvZ1UZWqRiwqVk3Dq541mvzV93R9jdbQilGw,435
+llama_cloud/resources/pipelines/client.py,sha256=q44JF3VC7C__tM7stWqWcIlQ9IWOijRAjiNjL9cBHhk,127688
+llama_cloud/resources/pipelines/types/__init__.py,sha256=phHeTm9cC0tWi4fgrfrjsLAt-UBl1GwcThcSo26vTac,513
 llama_cloud/resources/pipelines/types/pipeline_file_update_custom_metadata_value.py,sha256=trI48WLxPcAqV9207Q6-3cj1nl4EGlZpw7En56ZsPgg,217
+llama_cloud/resources/pipelines/types/pipeline_update_transform_config.py,sha256=QhoTMm88VYbc9EktYuA8qhbUFqwIpHmO5LhML7Z4Sjk,872
 llama_cloud/resources/projects/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 llama_cloud/resources/projects/client.py,sha256=nK81HdhGbWY1rh8rSEsKzRuvyvCQ-IkhLHIPDqEqVFU,47754
-llama_cloud/types/__init__.py,sha256=
+llama_cloud/types/__init__.py,sha256=xSVdNFtS5JrUR-YkA1sCMfxAPsWqwjCh0v66foyyvA8,14899
+llama_cloud/types/advanced_mode_transform_config.py,sha256=4xCXye0_cPmVS1F8aNTx81sIaEPjQH9kiCCAIoqUzlI,1502
+llama_cloud/types/advanced_mode_transform_config_chunking_config.py,sha256=wYbJnWLpeQDfhmDZz-wJfYzD1iGT5Jcxb9ga3mzUuvk,1983
+llama_cloud/types/advanced_mode_transform_config_segmentation_config.py,sha256=anNGq0F5-IlbIW3kpC8OilzLJnUq5tdIcWHnRnmlYsg,1303
 llama_cloud/types/auto_transform_config.py,sha256=HVeHZM75DMRznScqLTfrMwcZwIdyWPuaEYbPewnHqwc,1168
 llama_cloud/types/azure_open_ai_embedding.py,sha256=-9LzRDNcxhRvEshA8SaI9zFMTpHLXJ34iMnpIVk88Cc,3590
 llama_cloud/types/base.py,sha256=cn_Zn61yLMDCMm1iZTPvKILSRlqRzOqZtSYyYBY5dIE,938
 llama_cloud/types/base_prompt_template.py,sha256=GO9k4EDVMf3gRQIA4bVfXqgIMKnKTXhi1JlGvhqXDRY,1576
 llama_cloud/types/bedrock_embedding.py,sha256=RbECq-DxfbizAWpgUN4QIandJ-bGmgFug2EQH6LPHP0,2627
-llama_cloud/types/
+llama_cloud/types/character_chunking_config.py,sha256=2ooAnrlVVbKj4nDi_lR66x5E6nWOmj5YDWhSMQD0ubc,1035
+llama_cloud/types/character_splitter.py,sha256=tA8Eob62cs3geVMVkFvDp17nxijYVUQyjpvV0wGyLpQ,1969
+llama_cloud/types/chat_data.py,sha256=tCZAqgfF5IAElLD9hDRrSzG5-nh_gnJkDm2B-gIg8bg,1356
 llama_cloud/types/chat_message.py,sha256=DNzACxFUJxIHHo74j0e0DSz_1BBgykqfMPekiGCheFo,1168
-llama_cloud/types/cloud_az_storage_blob_data_source.py,sha256=
+llama_cloud/types/cloud_az_storage_blob_data_source.py,sha256=T4zqGF2KUoJ3g9sQbVcmJI2Rae5tvsUm0ML0Lvh8JMU,2196
 llama_cloud/types/cloud_azure_ai_search_vector_store.py,sha256=PlkvzRM7WFNQBxsrhC81Rmm5kVK3rNaOLAYe14L8BzE,1401
 llama_cloud/types/cloud_chroma_vector_store.py,sha256=-PKWkXWRpypeVy6nSbFDDkypdBgHgeqsXtfjGKygjXM,1388
 llama_cloud/types/cloud_confluence_data_source.py,sha256=jLV9bytO2u-Nhn9-HG95TI3xz1934jzPyPOGE_j0h9A,1926
 llama_cloud/types/cloud_document.py,sha256=VJBsYh0OLThzUYfQQE2DZjaMLoyjxCWda8o_ePfGj5Y,1201
 llama_cloud/types/cloud_document_create.py,sha256=F_Zy1Au9Ta1D19Zy1DBmnKIyqqE2TUpzuakHSKn0C-M,1224
 llama_cloud/types/cloud_jira_data_source.py,sha256=chvMhR253mgLAslxvCsS7T7UBM97i0XFjjtx6pfv8Ts,1732
+llama_cloud/types/cloud_milvus_vector_store.py,sha256=CHFTJSYPZKYPUU-jpB1MG8OwRvnPiT07o7cYCvQMZLA,1235
+llama_cloud/types/cloud_mongo_db_atlas_vector_search.py,sha256=R-3zF5aH1PvkhXpGLGCFdfgS6Ehey8iYQFX6N0GZNA8,1725
 llama_cloud/types/cloud_notion_page_data_source.py,sha256=XMbp5dbcR3uTwamV8JlXYk_2UteNJUbvH43caVE0A48,1397
 llama_cloud/types/cloud_one_drive_data_source.py,sha256=anI5y6si0PtrEiN9kTxF0UplDFHr-_yhL373fToUkdg,1686
 llama_cloud/types/cloud_pinecone_vector_store.py,sha256=UyCFAawIDnmPAmTcWjrFCKatypqc4cC4LpuAUOsyzUc,1647
@@ -70,19 +80,20 @@ llama_cloud/types/cloud_slack_data_source.py,sha256=nj6atJkUYfHauWDBXol3WcDxPhDq
 llama_cloud/types/cloud_weaviate_vector_store.py,sha256=D9ZKG9kpZyoncNCxD49e3RURYrI5tieZroPcTaJGE20,1309
 llama_cloud/types/code_splitter.py,sha256=TMfGeunLMJagX3lvxr76nhosD48Reu4CexVzpU_03Rg,1971
 llama_cloud/types/cohere_embedding.py,sha256=qpF6fMm5z5YGc47RfLUM9XPsHbYNRlhSlrBe-9OotHA,1969
-llama_cloud/types/configurable_data_sink_names.py,sha256=
+llama_cloud/types/configurable_data_sink_names.py,sha256=Cue3CIK0jXSOlbQ2Z44tyDW1fpObzbXiCe0zilxt7Xk,1572
 llama_cloud/types/configurable_data_source_names.py,sha256=NrkTdTVUEjb4nrLlmZP2Piko70jToGRAxR2MPs3kEzA,1689
 llama_cloud/types/configurable_transformation_definition.py,sha256=LDOhI5IDxlLDWM_p_xwCFM7qq1y-aGA8UxN7dnplDlU,1886
-llama_cloud/types/configurable_transformation_names.py,sha256
+llama_cloud/types/configurable_transformation_names.py,sha256=tQ8x9-NVisUd-I5vkY1Y_edNHM9pRjQ5cw0POjXUS2E,3216
 llama_cloud/types/configured_transformation_item.py,sha256=9caK5ZOKgGCZc6ynJJIWwpxpScKHOHkZwHFlsBy-Fog,1826
 llama_cloud/types/configured_transformation_item_component.py,sha256=RXQ1Ed2HlqQ-V7RSDA9sndPBbJUhwfczVpCWHRKQigY,311
-llama_cloud/types/configured_transformation_item_component_one.py,sha256=
+llama_cloud/types/configured_transformation_item_component_one.py,sha256=yO_3dv2u0i0RQI_ODrEpPx9aJpMTuqrUKhmDJUbfehU,1158
+llama_cloud/types/custom_claims.py,sha256=BxiBjlyUrJE_WhWrC9U1itsQnXSy4KGFyguVLlkhYLE,2706
 llama_cloud/types/data_sink.py,sha256=11LlzUEMkgT-20OsMlvZYgbOnOvtxD4Y0NGyiVpP1_M,1524
 llama_cloud/types/data_sink_component.py,sha256=AMOlCar00ApJarc4sEVqYGoPWqjuiV19suOUvpIQtlg,224
-llama_cloud/types/data_sink_component_one.py,sha256=
+llama_cloud/types/data_sink_component_one.py,sha256=FVa5lyfY4kgGYRvM5qb18nM5uz9DL2Sjfe1yRB5jAF4,904
 llama_cloud/types/data_sink_create.py,sha256=7hFoMZwd9YoP6pUjmXnzKsS1Ey5OeEQ-mIlNoh8tYAE,1288
 llama_cloud/types/data_sink_create_component.py,sha256=u7UQVzTKLABtjIuk-CWE0MU5s90EXaAhWfBEuMI5Q54,249
-llama_cloud/types/data_sink_create_component_one.py,sha256=
+llama_cloud/types/data_sink_create_component_one.py,sha256=jt9qk-OSS_YI0cvnaeiMxtE8NDdme8ZXXxtUEbaoEfM,910
 llama_cloud/types/data_sink_definition.py,sha256=5ve_pq02s8szc34-wWobMe6BAPj_c7e9n9FFsfDqEQ0,1561
 llama_cloud/types/data_source.py,sha256=H98i0VlmB_eUczmUuhbAVG7uP1wcLLlE2gSKQWLTy7w,1830
 llama_cloud/types/data_source_component.py,sha256=xx1-6EJUtfr2A6BgkOtFM4w5I_3zSHqO1qnRRHSNcTc,232
@@ -93,12 +104,14 @@ llama_cloud/types/data_source_create_component_one.py,sha256=rkVjFBxh1wA1BcsDWsJ
 llama_cloud/types/data_source_create_custom_metadata_value.py,sha256=ejSsQNbszYQaUWFh9r9kQpHf88qbhuRv1SI9J_MOSC0,215
 llama_cloud/types/data_source_custom_metadata_value.py,sha256=pTZn5yjZYmuOhsLABFJOKZblZUkRqo1CqLAuP5tKji4,209
 llama_cloud/types/data_source_definition.py,sha256=HlSlTxzYcQJOSo_2OSroAE8vAr-otDvTNBSEkA54vL8,1575
+llama_cloud/types/element_segmentation_config.py,sha256=QOBk8YFrgK0I2m3caqV5bpYaGXbk0fMSjZ4hUPZXZDI,959
 llama_cloud/types/embedding_config.py,sha256=eqW7xg1IHZcXIEsqVzAn1thxcWGTeBwhpDuqpS6EYKw,1319
-llama_cloud/types/embedding_config_component.py,sha256=
+llama_cloud/types/embedding_config_component.py,sha256=glBnixqC6GkDSMitbVs5aiAw566XFMkKrHgiNM1hEBU,252
+llama_cloud/types/embedding_config_component_one.py,sha256=vFoDX7Cf_Qw9uGDZsIXsH72A2eFoiksMMhCfob1dFFQ,604
 llama_cloud/types/embedding_config_type.py,sha256=iLK2npXhXa0E1FjsZvCC1JTzwJ1ItHYkNgDJvTkofyc,1470
 llama_cloud/types/eval_dataset.py,sha256=Uav-YJqAvyzCp1j2XavzzVLV975uki71beIBLkCt8LY,1408
 llama_cloud/types/eval_dataset_job_params.py,sha256=vcXLJWO581uigNvGAurPDgMeEFtQURWucLF5pemdeS0,1343
-llama_cloud/types/eval_dataset_job_record.py,sha256=
+llama_cloud/types/eval_dataset_job_record.py,sha256=YcTXMBLNQaFw1jAvoQ6CnOsfm8XTDtgBumPr9--o380,2800
 llama_cloud/types/eval_execution_params.py,sha256=TkdGGLfBIS2AeeUZtQBqC-Ln7_xPsU44JbN0yOBuP3Q,1382
 llama_cloud/types/eval_execution_params_override.py,sha256=yhYHQvtk50FW_3oOFpOU-Swuh0MhBTY2-GNsXVWZJNY,1399
 llama_cloud/types/eval_llm_model_data.py,sha256=H56AfhYsPA3eMKj1418_67tJ-5PsCDW36-6Zyif-f3M,1162
@@ -115,19 +128,20 @@ llama_cloud/types/file_resource_info_value.py,sha256=g6T6ELeLK9jgcvX6r-EuAl_4Jkw
 llama_cloud/types/filter_condition.py,sha256=in8L0rP6KO3kd8rRakDjrxBZTWZen6VWVojQnZELORc,520
 llama_cloud/types/filter_operator.py,sha256=nfd5LFzH46ZhUiCzT8poPf1NGibx4v833tLXu06dDE8,2071
 llama_cloud/types/gemini_embedding.py,sha256=BR8A9feVz1c7EoaUQKzOMP3PgZq-mw-n6Di-vOI-0ww,2209
-llama_cloud/types/html_node_parser.py,sha256=xjoKAejLrwhsK10ubEZh3OPuyus8f5XZYdl5SC__Kyw,1685
 llama_cloud/types/http_validation_error.py,sha256=iOSKYv0dJGjyIq8DAeLVKNJY-GiM1b6yiXGpXrm4QS4,1058
 llama_cloud/types/hugging_face_inference_api_embedding.py,sha256=_nXn3KkPnnQiuspEUsBASHJOjeGYHuDUq1eBfXr6xwg,3315
 llama_cloud/types/hugging_face_inference_api_embedding_token.py,sha256=A7-_YryBcsP4G5uRyJ9acao3XwX5-YC3NRndTeDAPj4,144
-llama_cloud/types/
+llama_cloud/types/ingestion_error_response.py,sha256=8u0cyT44dnpkNeUKemTvJMUqi_WyPcYQKP_DMTqaFPY,1259
+llama_cloud/types/job_name_mapping.py,sha256=scAbHrxvowCE3jHRZyYr2bBE5wvMMdBw7zpQ-lp5dY0,1433
 llama_cloud/types/llama_parse_parameters.py,sha256=0oiflzEbTRwxnAYZ3iaQdawFSys5BdhhA7YDzlg7IAs,2047
-llama_cloud/types/llama_parse_supported_file_extensions.py,sha256=
+llama_cloud/types/llama_parse_supported_file_extensions.py,sha256=0IurzDxhIwdxCuTh1J9NXA_bU9VnKagDCs3853iREWY,11244
 llama_cloud/types/llm.py,sha256=T-Uv5OO0E6Rscpn841302jx3c7G1uo9LJkdrGlNGk30,2238
+llama_cloud/types/llm_parameters.py,sha256=K3OsG76rKzOrVwjO7EiJxEf0QMpg_5mfGKd51XdEE4w,1495
 llama_cloud/types/local_eval.py,sha256=77NY_rq4zr0V3iB-PXE7Om6LcjRrytLbQ55f_ovAF-M,2050
 llama_cloud/types/local_eval_results.py,sha256=G1rLE6vO2lEziHQ6bAbZvpJMTrkSYWFvsS1iyZZ44Jw,1449
 llama_cloud/types/local_eval_sets.py,sha256=XJSSriwRvkma889pPiBQrpRakKejKOX3tWPu1TGb1ug,1181
 llama_cloud/types/managed_ingestion_status.py,sha256=IW5WpBSofGlJfypFrl3mp4yx9Lq4eHFs-1IOl1R33dI,1128
-llama_cloud/types/managed_ingestion_status_response.py,sha256=
+llama_cloud/types/managed_ingestion_status_response.py,sha256=1DRxebcZlCKFqA9iwhyfhXlFQfKUjE-SsCzmIJnoobM,1529
 llama_cloud/types/markdown_element_node_parser.py,sha256=N3HKe8ZVVzJvley9UxATSbXhNkgVafhJgtnyMePjMBU,2121
 llama_cloud/types/markdown_node_parser.py,sha256=T4VNqkKmwQtItpdIC2uwfBnIGEfGQ8s6F9vR9ChW64M,1589
 llama_cloud/types/message_role.py,sha256=38ES71HMWfKesfFqSkTBxDcqdNqJHlNYQr1pPKlxSXs,1208
@@ -137,19 +151,24 @@ llama_cloud/types/metadata_filters.py,sha256=uSf6sB4oQu6WzMPNFG6Tc4euqEiYcj_X14Y
 llama_cloud/types/metadata_filters_filters_item.py,sha256=e8KhD2q6Qc2_aK6r5CvyxC0oWVYO4F4vBIcB9eMEPPM,246
 llama_cloud/types/metric_result.py,sha256=jLHgHnU3Gj1rPqwYSzBZd2bnF-yGiHJj4Gi6uRf8BaA,1237
 llama_cloud/types/node_parser.py,sha256=LlM3qsrLbFlAd4SkhAGJjkEJXk04D0faKhQNXWfLQ00,1364
+llama_cloud/types/none_chunking_config.py,sha256=D062t314Vp-s4n9h8wNgsYfElI4PonPKmihvjEmaqdA,952
+llama_cloud/types/none_segmentation_config.py,sha256=j3jUA6E8uFtwDMEu4TFG3Q4ZGCGiuUfUW9AMO1NNqXU,956
 llama_cloud/types/object_type.py,sha256=-7wzQjscpmpUdM8vOIzq1EfAMhY9pS3wIaYIo6GndHQ,736
 llama_cloud/types/open_ai_embedding.py,sha256=vAiDcIV129M7YT5hI99A2kheN42793m4kEJDu7fUKQg,3217
 llama_cloud/types/organization.py,sha256=6mVWJDaDxrnMHuufnpQEhgWTPZW9bhsjGFtgtf4VyJg,1321
 llama_cloud/types/organization_create.py,sha256=hUXRwArIx_0D_lilpL7z-B0oJJ5yEX8Sbu2xqfH_9so,1086
+llama_cloud/types/page_segmentation_config.py,sha256=WE97h8QrwsFqnzQ8EX8P5Db8pn6nZaC0VD1o9C3KAGQ,956
+llama_cloud/types/page_splitter_node_parser.py,sha256=AwdDkxX-WgJEMOc8Jvz_IByfYULNdVIM9CoD6gEcnhU,1476
 llama_cloud/types/parser_languages.py,sha256=Ps3IlaSt6tyxEI657N3-vZL96r2puk8wsf31cWnO-SI,10840
 llama_cloud/types/parsing_history_item.py,sha256=_MVzf43t84PbmjOzsMLZ_NBoyiisigLWz-fr0ZxU63g,1183
-llama_cloud/types/parsing_job.py,sha256=
+llama_cloud/types/parsing_job.py,sha256=9hoKN4h-t0fka4-fX-79VbvcK2EEZRk2bDDZvCjaYZo,1093
 llama_cloud/types/parsing_job_json_result.py,sha256=vC0FNMklitCgcB0esthMfv_RbbyFOzvwzvQsh58Im8o,1040
 llama_cloud/types/parsing_job_markdown_result.py,sha256=E3-CVNFH1IMyuGs_xzYfYdNgq9AdnDshA_CxOTXz_dQ,1094
 llama_cloud/types/parsing_job_text_result.py,sha256=1QZielAWXuzPFOgr_DWshXPjmbExAAgAHKAEYVQVtJ8,1082
-llama_cloud/types/parsing_usage.py,sha256=
+llama_cloud/types/parsing_usage.py,sha256=JLlozu-vIkcRKqWaOVJ9Z2TrY7peJRTzOpYjOThGKGQ,1012
 llama_cloud/types/pipeline.py,sha256=h-Xo7HirFCvgiu7NaqSrUTM2wJKd9WXzcqnZ_j_kRkU,2661
-llama_cloud/types/pipeline_create.py,sha256=
+llama_cloud/types/pipeline_create.py,sha256=usFxjKpz4PpzcAs66iqNDMxMPZRyT6Ezyyr4aRhJnE4,3102
+llama_cloud/types/pipeline_create_transform_config.py,sha256=CiMil0NrwvxR34CAzrSWw9Uo0117tz409sptH1k_r48,854
 llama_cloud/types/pipeline_data_source.py,sha256=A3AlRzTD7zr1y-u5O5LFESqIupbbG-fqUndQgeYj77w,2062
 llama_cloud/types/pipeline_data_source_component.py,sha256=Pk_K0Gv7xSWe5BKCdxz82EFd6AQDvZGN-6t3zg9h8NY,265
 llama_cloud/types/pipeline_data_source_component_one.py,sha256=sYaNaVl2gk-Clq2BCOKT2fUOGa_B7kcsw1P7aVdn-jA,873
@@ -173,24 +192,25 @@ llama_cloud/types/pydantic_program_mode.py,sha256=QfvpqR7TqyNuOxo78Sr58VOu7KDSBr
 llama_cloud/types/related_node_info.py,sha256=YqdYiBxtj8njp-UiLMaTBqoYKTTCEu0-DBta4ZnFVo4,1241
 llama_cloud/types/retrieval_mode.py,sha256=lVfSVelJCKMK1Da4yx7B9m9y6Rj35SGKTx-3Z2UOAPE,784
 llama_cloud/types/retrieve_results.py,sha256=ysSEHTHKBmASTZchcfmD42YAAOoB0KJOyqsYokfTAmE,1523
+llama_cloud/types/semantic_chunking_config.py,sha256=dFDniTVWpRc7UcmVFvljUoyL5Ztd-l-YrHII7U-yM-k,1053
+llama_cloud/types/sentence_chunking_config.py,sha256=NA9xidK5ICxJPkEMQZWNcsV0Hw9Co_bzRWeYe4uSh9I,1116
 llama_cloud/types/sentence_splitter.py,sha256=mkP5vQsXnLhn6iZZN4MrAfVoFdBYhZTIHoA5AewXwZY,2213
-llama_cloud/types/simple_file_node_parser.py,sha256=C0B0eVB7TOKpmX_lMvUvCRzk4Be3R_gh5eOtQvLOnVY,1676
 llama_cloud/types/status_enum.py,sha256=2kQLDa8PdvK45yJDSV2i53rBA3wCR1PJj-IdK0Dcr2E,868
 llama_cloud/types/supported_eval_llm_model.py,sha256=CKWBCKPNa_NjjlmenTDLbc9tt113qzwjq2Xi3WJ6wq8,1364
-llama_cloud/types/supported_eval_llm_model_names.py,sha256=
+llama_cloud/types/supported_eval_llm_model_names.py,sha256=W6fIA0JC4e4hbuvZ_EFbDdRp0viZf--_e0v_b8MIPT0,1111
 llama_cloud/types/text_node.py,sha256=ANT9oPqBs9IJFPhtq-6PC4l44FA3ZYjz_9nOE8h0RAM,2940
 llama_cloud/types/text_node_relationships_value.py,sha256=qmXURTk1Xg7ZDzRSSV1uDEel0AXRLohND5ioezibHY0,217
 llama_cloud/types/text_node_with_score.py,sha256=k-KYWO_mgJBvO6xUfOD5W6v1Ku9E586_HsvDoQbLfuQ,1229
+llama_cloud/types/token_chunking_config.py,sha256=XNvnTsNd--YOMQ_Ad8hoqhYgQftqkBHKVn6i7nJnMqs,1067
 llama_cloud/types/token_text_splitter.py,sha256=Mv8xBCvMXyYuQq1KInPe65O0YYCLWxs61pIbkBRfxG0,1883
-llama_cloud/types/transform_config.py,sha256=Xci_UUMz_xzx_OzePxLNk-6NvXO0H2PZtgEOApoF2lk,1315
-llama_cloud/types/transform_config_mode.py,sha256=4jH-_MnlkM758y0lzlMh9JwGtHrdgAHdm_V8ikk7CbY,518
 llama_cloud/types/transformation_category_names.py,sha256=0xjYe-mDW9OKbTGqL5fSbTvqsfrG4LDu_stW_ubVLl4,582
+llama_cloud/types/user.py,sha256=QOICcqETXK1z1gORIJEm10-UH2ZjJbtSMCM9VsVGCmU,1186
 llama_cloud/types/user_organization.py,sha256=fLgTKr1phJ4EdhTXmr5086bRy9RTAUy4km6mQz_jgRI,1964
 llama_cloud/types/user_organization_create.py,sha256=YESlfcI64710OFdQzgGD4a7aItgBwcIKdM1xFPs1Szw,1209
 llama_cloud/types/user_organization_delete.py,sha256=Z8RSRXc0AGAuGxv6eQPC2S1XIdRfNCXBggfEefgPseM,1209
 llama_cloud/types/validation_error.py,sha256=yZDLtjUHDY5w82Ra6CW0H9sLAr18R0RY1UNgJKR72DQ,1084
 llama_cloud/types/validation_error_loc_item.py,sha256=LAtjCHIllWRBFXvAZ5QZpp7CPXjdtN9EB7HrLVo6EP0,128
-llama_cloud-0.0.
-llama_cloud-0.0.
-llama_cloud-0.0.
-llama_cloud-0.0.
+llama_cloud-0.0.13.dist-info/LICENSE,sha256=_iNqtPcw1Ue7dZKwOwgPtbegMUkWVy15hC7bffAdNmY,1067
+llama_cloud-0.0.13.dist-info/METADATA,sha256=pgfKBQAqdeqwvud1PkR3fr_36qkR3ZJz772qCHX6bnY,751
+llama_cloud-0.0.13.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+llama_cloud-0.0.13.dist-info/RECORD,,

llama_cloud/types/transform_config_mode.py DELETED
@@ -1,21 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import enum
-import typing
-
-T_Result = typing.TypeVar("T_Result")
-
-
-class TransformConfigMode(str, enum.Enum):
-    """
-    An enumeration.
-    """
-
-    AUTO = "AUTO"
-    ADVANCED = "ADVANCED"
-
-    def visit(self, auto: typing.Callable[[], T_Result], advanced: typing.Callable[[], T_Result]) -> T_Result:
-        if self is TransformConfigMode.AUTO:
-            return auto()
-        if self is TransformConfigMode.ADVANCED:
-            return advanced()
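
TransformConfig and its TransformConfigMode enum were dropped in the same release, so code that used the uppercase enum values now selects a per-endpoint union member carrying a lowercase literal instead. A rough before/after sketch (the 0.0.11 call is reconstructed from the deleted enum above, not shown elsewhere in this diff):

```python
# 0.0.11 (removed): an uppercase enum value selected the mode
# from llama_cloud.types.transform_config_mode import TransformConfigMode
# config = TransformConfig(mode=TransformConfigMode.AUTO)

# 0.0.13: the mode is a lowercase Literal on the union member
from llama_cloud.types.pipeline_create_transform_config import PipelineCreateTransformConfig_Auto

config = PipelineCreateTransformConfig_Auto(mode="auto")
```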

{llama_cloud-0.0.11.dist-info → llama_cloud-0.0.13.dist-info}/LICENSE: File without changes
{llama_cloud-0.0.11.dist-info → llama_cloud-0.0.13.dist-info}/WHEEL: File without changes