llama-cloud 0.0.13__py3-none-any.whl → 0.0.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
This version of llama-cloud has been flagged as potentially problematic.
- llama_cloud/__init__.py +76 -14
- llama_cloud/resources/__init__.py +14 -0
- llama_cloud/resources/data_sources/types/data_source_update_component_one.py +2 -0
- llama_cloud/resources/evals/client.py +5 -5
- llama_cloud/resources/parsing/client.py +8 -0
- llama_cloud/resources/pipelines/__init__.py +14 -0
- llama_cloud/resources/pipelines/client.py +285 -66
- llama_cloud/resources/pipelines/types/__init__.py +16 -0
- llama_cloud/resources/pipelines/types/pipeline_update_embedding_config.py +78 -0
- llama_cloud/types/__init__.py +68 -14
- llama_cloud/types/{embedding_config.py → azure_open_ai_embedding_config.py} +4 -6
- llama_cloud/types/bedrock_embedding_config.py +34 -0
- llama_cloud/types/box_auth_mechanism.py +21 -0
- llama_cloud/types/chat_data.py +1 -1
- llama_cloud/types/chat_message.py +14 -4
- llama_cloud/types/cloud_azure_ai_search_vector_store.py +3 -0
- llama_cloud/types/cloud_box_data_source.py +51 -0
- llama_cloud/types/cloud_document.py +3 -0
- llama_cloud/types/cloud_document_create.py +3 -0
- llama_cloud/types/cloud_sharepoint_data_source.py +2 -1
- llama_cloud/types/cohere_embedding_config.py +34 -0
- llama_cloud/types/configurable_data_source_names.py +4 -0
- llama_cloud/types/custom_claims.py +0 -3
- llama_cloud/types/data_source_component_one.py +2 -0
- llama_cloud/types/data_source_create_component_one.py +2 -0
- llama_cloud/types/eval_execution_params.py +2 -2
- llama_cloud/types/eval_execution_params_override.py +2 -2
- llama_cloud/types/filter_operator.py +4 -0
- llama_cloud/types/gemini_embedding_config.py +34 -0
- llama_cloud/types/hugging_face_inference_api_embedding_config.py +34 -0
- llama_cloud/types/input_message.py +42 -0
- llama_cloud/types/llama_parse_parameters.py +4 -1
- llama_cloud/types/{eval_llm_model_data.py → llm_model_data.py} +1 -1
- llama_cloud/types/llm_parameters.py +2 -2
- llama_cloud/types/{supported_eval_llm_model.py → message_annotation.py} +6 -6
- llama_cloud/types/metadata_filter.py +1 -1
- llama_cloud/types/open_ai_embedding_config.py +34 -0
- llama_cloud/types/page_segmentation_config.py +2 -0
- llama_cloud/types/pipeline.py +11 -1
- llama_cloud/types/pipeline_create.py +3 -3
- llama_cloud/types/pipeline_create_embedding_config.py +78 -0
- llama_cloud/types/pipeline_data_source.py +7 -0
- llama_cloud/types/pipeline_data_source_component_one.py +2 -0
- llama_cloud/types/pipeline_data_source_create.py +3 -0
- llama_cloud/types/pipeline_embedding_config.py +78 -0
- llama_cloud/types/pipeline_transform_config.py +31 -0
- llama_cloud/types/playground_session.py +51 -0
- llama_cloud/types/supported_llm_model.py +41 -0
- llama_cloud/types/{supported_eval_llm_model_names.py → supported_llm_model_names.py} +10 -6
- {llama_cloud-0.0.13.dist-info → llama_cloud-0.0.15.dist-info}/METADATA +1 -1
- {llama_cloud-0.0.13.dist-info → llama_cloud-0.0.15.dist-info}/RECORD +53 -42
- llama_cloud/types/embedding_config_component.py +0 -7
- llama_cloud/types/embedding_config_component_one.py +0 -19
- llama_cloud/types/embedding_config_type.py +0 -41
- {llama_cloud-0.0.13.dist-info → llama_cloud-0.0.15.dist-info}/LICENSE +0 -0
- {llama_cloud-0.0.13.dist-info → llama_cloud-0.0.15.dist-info}/WHEEL +0 -0
llama_cloud/types/data_source_create_component_one.py
CHANGED

@@ -3,6 +3,7 @@
 import typing
 
 from .cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
+from .cloud_box_data_source import CloudBoxDataSource
 from .cloud_confluence_data_source import CloudConfluenceDataSource
 from .cloud_jira_data_source import CloudJiraDataSource
 from .cloud_notion_page_data_source import CloudNotionPageDataSource
@@ -20,4 +21,5 @@ DataSourceCreateComponentOne = typing.Union[
     CloudNotionPageDataSource,
     CloudConfluenceDataSource,
     CloudJiraDataSource,
+    CloudBoxDataSource,
 ]

llama_cloud/types/eval_execution_params.py
CHANGED

@@ -4,7 +4,7 @@ import datetime as dt
 import typing
 
 from ..core.datetime_utils import serialize_datetime
-from .
+from .supported_llm_model_names import SupportedLlmModelNames
 
 try:
     import pydantic
@@ -20,7 +20,7 @@ class EvalExecutionParams(pydantic.BaseModel):
     Schema for the params for an eval execution.
     """
 
-    llm_model: typing.Optional[
+    llm_model: typing.Optional[SupportedLlmModelNames] = pydantic.Field(
         description="The LLM model to use within eval execution."
     )
     qa_prompt_tmpl: typing.Optional[str] = pydantic.Field(

llama_cloud/types/eval_execution_params_override.py
CHANGED

@@ -4,7 +4,7 @@ import datetime as dt
 import typing
 
 from ..core.datetime_utils import serialize_datetime
-from .
+from .supported_llm_model_names import SupportedLlmModelNames
 
 try:
     import pydantic
@@ -20,7 +20,7 @@ class EvalExecutionParamsOverride(pydantic.BaseModel):
     Schema for the params override for an eval execution.
     """
 
-    llm_model: typing.Optional[
+    llm_model: typing.Optional[SupportedLlmModelNames] = pydantic.Field(
         description="The LLM model to use within eval execution."
     )
     qa_prompt_tmpl: typing.Optional[str] = pydantic.Field(

llama_cloud/types/filter_operator.py
CHANGED

@@ -23,6 +23,7 @@ class FilterOperator(str, enum.Enum):
     ALL = "all"
     TEXT_MATCH = "text_match"
     CONTAINS = "contains"
+    IS_EMPTY = "is_empty"
 
     def visit(
         self,
@@ -38,6 +39,7 @@ class FilterOperator(str, enum.Enum):
         all: typing.Callable[[], T_Result],
         text_match: typing.Callable[[], T_Result],
         contains: typing.Callable[[], T_Result],
+        is_empty: typing.Callable[[], T_Result],
     ) -> T_Result:
         if self is FilterOperator.EQUAL_TO:
             return equal_to()
@@ -63,3 +65,5 @@
             return text_match()
         if self is FilterOperator.CONTAINS:
             return contains()
+        if self is FilterOperator.IS_EMPTY:
+            return is_empty()

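The new IS_EMPTY operator can be used anywhere a FilterOperator is accepted, most naturally in a metadata filter. A minimal sketch, assuming the types are re-exported from the package root and that MetadataFilter keeps the usual key/value/operator fields; neither assumption is confirmed by this diff:

from llama_cloud import FilterOperator, MetadataFilter

# Match documents whose "author" metadata key has no value.
# The key name is illustrative; value=None mirrors the "is empty" semantics.
no_author = MetadataFilter(key="author", value=None, operator=FilterOperator.IS_EMPTY)
print(no_author.json())
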
llama_cloud/types/gemini_embedding_config.py
ADDED

@@ -0,0 +1,34 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .gemini_embedding import GeminiEmbedding
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class GeminiEmbeddingConfig(pydantic.BaseModel):
+    component: typing.Optional[GeminiEmbedding] = pydantic.Field(
+        description="Configuration for the Gemini embedding model."
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

llama_cloud/types/hugging_face_inference_api_embedding_config.py
ADDED

@@ -0,0 +1,34 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .hugging_face_inference_api_embedding import HuggingFaceInferenceApiEmbedding
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class HuggingFaceInferenceApiEmbeddingConfig(pydantic.BaseModel):
+    component: typing.Optional[HuggingFaceInferenceApiEmbedding] = pydantic.Field(
+        description="Configuration for the HuggingFace Inference API embedding model."
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

llama_cloud/types/input_message.py
ADDED

@@ -0,0 +1,42 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .message_role import MessageRole
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class InputMessage(pydantic.BaseModel):
+    """
+    This is distinct from a ChatMessage because this schema is enforced by the AI Chat library used in the frontend
+    """
+
+    id: typing.Optional[str] = pydantic.Field(description="ID of the message, if any. Not necessarily a UUID.")
+    role: MessageRole
+    content: str
+    data: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field(
+        description="Additional data to be stored with the message."
+    )
+    class_name: typing.Optional[str]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

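A minimal sketch of constructing the new InputMessage model. The module paths follow the imports shown above; MessageRole.USER is assumed to be a member of the existing MessageRole enum, and the id/content values are illustrative:

from llama_cloud.types.input_message import InputMessage
from llama_cloud.types.message_role import MessageRole

# id is optional and not necessarily a UUID; data is an arbitrary dict stored with the message.
msg = InputMessage(
    id="msg-1",
    role=MessageRole.USER,
    content="Summarize the latest synced documents.",
    data={"source": "playground-ui"},
)
print(msg.json())  # serialized by alias with unset fields excluded, per the generated json()
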
llama_cloud/types/llama_parse_parameters.py
CHANGED

@@ -17,7 +17,7 @@ except ImportError:
 
 class LlamaParseParameters(pydantic.BaseModel):
     """
-    Settings that can be configured for how to use LlamaParse to parse files
+    Settings that can be configured for how to use LlamaParse to parse files within a LlamaCloud pipeline.
     """
 
     languages: typing.Optional[typing.List[ParserLanguages]]
@@ -38,6 +38,9 @@ class LlamaParseParameters(pydantic.BaseModel):
     vendor_multimodal_api_key: typing.Optional[str]
     page_prefix: typing.Optional[str]
     page_suffix: typing.Optional[str]
+    take_screenshot: typing.Optional[bool]
+    s_3_input_path: typing.Optional[str] = pydantic.Field(alias="s3_input_path")
+    s_3_output_path_prefix: typing.Optional[str] = pydantic.Field(alias="s3_output_path_prefix")
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

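The two new S3 fields are declared with aliases, so they are passed and serialized under their API names (s3_input_path, s3_output_path_prefix) rather than the Python attribute names. A small sketch under that assumption; the bucket paths are placeholders:

from llama_cloud.types.llama_parse_parameters import LlamaParseParameters

params = LlamaParseParameters(
    take_screenshot=True,
    s3_input_path="s3://example-bucket/input/",           # populates s_3_input_path via its alias
    s3_output_path_prefix="s3://example-bucket/parsed/",  # populates s_3_output_path_prefix via its alias
)
print(params.json())  # json() passes by_alias=True, so the s3_* names appear in the output
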
llama_cloud/types/llm_parameters.py
CHANGED

@@ -20,8 +20,8 @@ class LlmParameters(pydantic.BaseModel):
     Comes with special serialization logic for types used commonly in platform codebase.
     """
 
-    model_name: typing.Optional[str] = pydantic.Field(description="The name of the model to use for
-    system_prompt: typing.Optional[str] = pydantic.Field(description="The system prompt to use for the
+    model_name: typing.Optional[str] = pydantic.Field(description="The name of the model to use for LLM completions.")
+    system_prompt: typing.Optional[str] = pydantic.Field(description="The system prompt to use for the completion.")
     temperature: typing.Optional[float] = pydantic.Field(description="The temperature value for the model.")
     class_name: typing.Optional[str]
 

llama_cloud/types/message_annotation.py (renamed from supported_eval_llm_model.py)
CHANGED

@@ -4,8 +4,6 @@ import datetime as dt
 import typing
 
 from ..core.datetime_utils import serialize_datetime
-from .eval_llm_model_data import EvalLlmModelData
-from .supported_eval_llm_model_names import SupportedEvalLlmModelNames
 
 try:
     import pydantic
@@ -16,13 +14,15 @@ except ImportError:
     import pydantic  # type: ignore
 
 
-class
+class MessageAnnotation(pydantic.BaseModel):
     """
-
+    Base schema model for BaseComponent classes used in the platform.
+    Comes with special serialization logic for types used commonly in platform codebase.
     """
 
-
-
+    type: str
+    data: typing.Optional[typing.Any]
+    class_name: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

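MessageAnnotation is a thin, open-ended model: a string type plus an arbitrary data payload. A minimal sketch; the "citation" discriminator value and payload shape are illustrative only, not taken from this diff:

from llama_cloud.types.message_annotation import MessageAnnotation

annotation = MessageAnnotation(
    type="citation",                # free-form discriminator string
    data={"node_id": "node-123"},   # Optional[Any] payload
)
print(annotation.dict())
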
llama_cloud/types/open_ai_embedding_config.py
ADDED

@@ -0,0 +1,34 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .open_ai_embedding import OpenAiEmbedding
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class OpenAiEmbeddingConfig(pydantic.BaseModel):
+    component: typing.Optional[OpenAiEmbedding] = pydantic.Field(
+        description="Configuration for the OpenAI embedding model."
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

llama_cloud/types/page_segmentation_config.py
CHANGED

@@ -15,6 +15,8 @@ except ImportError:
 
 
 class PageSegmentationConfig(pydantic.BaseModel):
+    page_separator: typing.Optional[str]
+
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
         return super().json(**kwargs_with_defaults)

llama_cloud/types/pipeline.py
CHANGED

@@ -8,6 +8,8 @@ from .configured_transformation_item import ConfiguredTransformationItem
 from .data_sink import DataSink
 from .eval_execution_params import EvalExecutionParams
 from .llama_parse_parameters import LlamaParseParameters
+from .pipeline_embedding_config import PipelineEmbeddingConfig
+from .pipeline_transform_config import PipelineTransformConfig
 from .pipeline_type import PipelineType
 from .preset_retrieval_params import PresetRetrievalParams
 
@@ -25,7 +27,6 @@ class Pipeline(pydantic.BaseModel):
     Schema for a pipeline.
     """
 
-    configured_transformations: typing.List[ConfiguredTransformationItem]
     id: str = pydantic.Field(description="Unique identifier")
     created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
     updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
@@ -37,6 +38,15 @@ class Pipeline(pydantic.BaseModel):
     managed_pipeline_id: typing.Optional[str] = pydantic.Field(
         description="The ID of the ManagedPipeline this playground pipeline is linked to."
     )
+    embedding_config: typing.Optional[PipelineEmbeddingConfig] = pydantic.Field(
+        description="Configuration for the embedding model."
+    )
+    configured_transformations: typing.Optional[typing.List[ConfiguredTransformationItem]] = pydantic.Field(
+        description="Deprecated don't use it, List of configured transformations."
+    )
+    transform_config: typing.Optional[PipelineTransformConfig] = pydantic.Field(
+        description="Configuration for the transformation."
+    )
     preset_retrieval_parameters: typing.Optional[PresetRetrievalParams] = pydantic.Field(
         description="Preset retrieval parameters for the pipeline."
     )

llama_cloud/types/pipeline_create.py
CHANGED

@@ -6,9 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime
 from .configured_transformation_item import ConfiguredTransformationItem
 from .data_sink_create import DataSinkCreate
-from .embedding_config import EmbeddingConfig
 from .eval_execution_params import EvalExecutionParams
 from .llama_parse_parameters import LlamaParseParameters
+from .pipeline_create_embedding_config import PipelineCreateEmbeddingConfig
 from .pipeline_create_transform_config import PipelineCreateTransformConfig
 from .pipeline_type import PipelineType
 from .preset_retrieval_params import PresetRetrievalParams
@@ -27,14 +27,14 @@ class PipelineCreate(pydantic.BaseModel):
     Schema for creating a pipeline.
     """
 
-    embedding_config: typing.Optional[
+    embedding_config: typing.Optional[PipelineCreateEmbeddingConfig] = pydantic.Field(
         description="Configuration for the embedding model."
     )
     transform_config: typing.Optional[PipelineCreateTransformConfig] = pydantic.Field(
         description="Configuration for the transformation."
     )
     configured_transformations: typing.Optional[typing.List[ConfiguredTransformationItem]] = pydantic.Field(
-        description="
+        description="Deprecated, use embedding_config or transform_config instead. configured transformations for the pipeline."
     )
     data_sink_id: typing.Optional[str] = pydantic.Field(
         description="Data sink ID. When provided instead of data_sink, the data sink will be looked up by ID."

llama_cloud/types/pipeline_create_embedding_config.py
ADDED

@@ -0,0 +1,78 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import typing_extensions
+
+from .azure_open_ai_embedding_config import AzureOpenAiEmbeddingConfig
+from .bedrock_embedding_config import BedrockEmbeddingConfig
+from .cohere_embedding_config import CohereEmbeddingConfig
+from .gemini_embedding_config import GeminiEmbeddingConfig
+from .hugging_face_inference_api_embedding_config import HuggingFaceInferenceApiEmbeddingConfig
+from .open_ai_embedding_config import OpenAiEmbeddingConfig
+
+
+class PipelineCreateEmbeddingConfig_OpenaiEmbedding(OpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["OPENAI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineCreateEmbeddingConfig_AzureEmbedding(AzureOpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["AZURE_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineCreateEmbeddingConfig_HuggingfaceApiEmbedding(HuggingFaceInferenceApiEmbeddingConfig):
+    type: typing_extensions.Literal["HUGGINGFACE_API_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineCreateEmbeddingConfig_BedrockEmbedding(BedrockEmbeddingConfig):
+    type: typing_extensions.Literal["BEDROCK_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineCreateEmbeddingConfig_GeminiEmbedding(GeminiEmbeddingConfig):
+    type: typing_extensions.Literal["GEMINI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineCreateEmbeddingConfig_CohereEmbedding(CohereEmbeddingConfig):
+    type: typing_extensions.Literal["COHERE_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+PipelineCreateEmbeddingConfig = typing.Union[
+    PipelineCreateEmbeddingConfig_OpenaiEmbedding,
+    PipelineCreateEmbeddingConfig_AzureEmbedding,
+    PipelineCreateEmbeddingConfig_HuggingfaceApiEmbedding,
+    PipelineCreateEmbeddingConfig_BedrockEmbedding,
+    PipelineCreateEmbeddingConfig_GeminiEmbedding,
+    PipelineCreateEmbeddingConfig_CohereEmbedding,
+]

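Taken together with the pipeline_create.py change above, embedding configuration on create is now a type-discriminated union instead of the removed EmbeddingConfig. A sketch of the OpenAI variant, assuming OpenAiEmbedding accepts model_name/api_key and that a name is the only other field PipelineCreate requires; neither is confirmed by this diff, and the key/model values are placeholders:

from llama_cloud.types.open_ai_embedding import OpenAiEmbedding
from llama_cloud.types.pipeline_create import PipelineCreate
from llama_cloud.types.pipeline_create_embedding_config import PipelineCreateEmbeddingConfig_OpenaiEmbedding

embedding_config = PipelineCreateEmbeddingConfig_OpenaiEmbedding(
    type="OPENAI_EMBEDDING",
    component=OpenAiEmbedding(model_name="text-embedding-3-small", api_key="sk-..."),  # fields assumed
)
pipeline = PipelineCreate(
    name="docs-pipeline",
    embedding_config=embedding_config,
)
print(pipeline.json())
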
llama_cloud/types/pipeline_data_source.py
CHANGED

@@ -34,6 +34,13 @@ class PipelineDataSource(pydantic.BaseModel):
     project_id: str
     data_source_id: str = pydantic.Field(description="The ID of the data source.")
     pipeline_id: str = pydantic.Field(description="The ID of the pipeline.")
+    last_synced_at: dt.datetime = pydantic.Field(description="The last time the data source was automatically synced.")
+    sync_interval: typing.Optional[float] = pydantic.Field(
+        description="The interval at which the data source should be synced."
+    )
+    sync_schedule_set_by: typing.Optional[str] = pydantic.Field(
+        description="The id of the user who set the sync schedule."
+    )
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/pipeline_data_source_component_one.py
CHANGED

@@ -3,6 +3,7 @@
 import typing
 
 from .cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
+from .cloud_box_data_source import CloudBoxDataSource
 from .cloud_confluence_data_source import CloudConfluenceDataSource
 from .cloud_jira_data_source import CloudJiraDataSource
 from .cloud_notion_page_data_source import CloudNotionPageDataSource
@@ -20,4 +21,5 @@ PipelineDataSourceComponentOne = typing.Union[
     CloudNotionPageDataSource,
     CloudConfluenceDataSource,
     CloudJiraDataSource,
+    CloudBoxDataSource,
 ]

llama_cloud/types/pipeline_data_source_create.py
CHANGED

@@ -20,6 +20,9 @@ class PipelineDataSourceCreate(pydantic.BaseModel):
     """
 
     data_source_id: str = pydantic.Field(description="The ID of the data source.")
+    sync_interval: typing.Optional[float] = pydantic.Field(
+        description="The interval at which the data source should be synced."
+    )
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

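A sketch of attaching a data source to a pipeline with the new per-source sync interval. The schema does not state the interval's unit, so seconds is an assumption, and the id is a placeholder:

from llama_cloud.types.pipeline_data_source_create import PipelineDataSourceCreate

link = PipelineDataSourceCreate(
    data_source_id="00000000-0000-0000-0000-000000000000",  # placeholder data source id
    sync_interval=43200.0,                                   # e.g. every 12 hours, if the unit is seconds
)
print(link.json())
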
llama_cloud/types/pipeline_embedding_config.py
ADDED

@@ -0,0 +1,78 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import typing_extensions
+
+from .azure_open_ai_embedding_config import AzureOpenAiEmbeddingConfig
+from .bedrock_embedding_config import BedrockEmbeddingConfig
+from .cohere_embedding_config import CohereEmbeddingConfig
+from .gemini_embedding_config import GeminiEmbeddingConfig
+from .hugging_face_inference_api_embedding_config import HuggingFaceInferenceApiEmbeddingConfig
+from .open_ai_embedding_config import OpenAiEmbeddingConfig
+
+
+class PipelineEmbeddingConfig_OpenaiEmbedding(OpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["OPENAI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineEmbeddingConfig_AzureEmbedding(AzureOpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["AZURE_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineEmbeddingConfig_HuggingfaceApiEmbedding(HuggingFaceInferenceApiEmbeddingConfig):
+    type: typing_extensions.Literal["HUGGINGFACE_API_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineEmbeddingConfig_BedrockEmbedding(BedrockEmbeddingConfig):
+    type: typing_extensions.Literal["BEDROCK_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineEmbeddingConfig_GeminiEmbedding(GeminiEmbeddingConfig):
+    type: typing_extensions.Literal["GEMINI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineEmbeddingConfig_CohereEmbedding(CohereEmbeddingConfig):
+    type: typing_extensions.Literal["COHERE_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+PipelineEmbeddingConfig = typing.Union[
+    PipelineEmbeddingConfig_OpenaiEmbedding,
+    PipelineEmbeddingConfig_AzureEmbedding,
+    PipelineEmbeddingConfig_HuggingfaceApiEmbedding,
+    PipelineEmbeddingConfig_BedrockEmbedding,
+    PipelineEmbeddingConfig_GeminiEmbedding,
+    PipelineEmbeddingConfig_CohereEmbedding,
+]

llama_cloud/types/pipeline_transform_config.py
ADDED

@@ -0,0 +1,31 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import typing_extensions
+
+from .advanced_mode_transform_config import AdvancedModeTransformConfig
+from .auto_transform_config import AutoTransformConfig
+
+
+class PipelineTransformConfig_Auto(AutoTransformConfig):
+    mode: typing_extensions.Literal["auto"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineTransformConfig_Advanced(AdvancedModeTransformConfig):
+    mode: typing_extensions.Literal["advanced"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+PipelineTransformConfig = typing.Union[PipelineTransformConfig_Auto, PipelineTransformConfig_Advanced]

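Transform settings follow the same pattern as the embedding configs, discriminated on mode ("auto" vs "advanced"). A minimal sketch of the auto variant; AutoTransformConfig's own fields (chunking options, etc.) are not shown in this diff, so none are set here and all are assumed optional:

from llama_cloud.types.pipeline_transform_config import PipelineTransformConfig_Auto

transform_config = PipelineTransformConfig_Auto(mode="auto")
print(transform_config.json())
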
llama_cloud/types/playground_session.py
ADDED

@@ -0,0 +1,51 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .chat_message import ChatMessage
+from .llm_parameters import LlmParameters
+from .preset_retrieval_params import PresetRetrievalParams
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class PlaygroundSession(pydantic.BaseModel):
+    """
+    A playground session for a user.
+    """
+
+    id: str = pydantic.Field(description="Unique identifier")
+    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
+    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
+    pipeline_id: str
+    user_id: str
+    llm_params_id: str
+    llm_params: typing.Optional[LlmParameters] = pydantic.Field(description="LLM parameters last used in this session.")
+    retrieval_params_id: str
+    retrieval_params: typing.Optional[PresetRetrievalParams] = pydantic.Field(
+        description="Preset retrieval parameters last used in this session."
+    )
+    chat_messages: typing.Optional[typing.List[ChatMessage]] = pydantic.Field(
+        description="Chat message history for this session."
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}

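Since PlaygroundSession is a response model, the usual entry point is parsing an API payload rather than constructing it by hand. A sketch using pydantic's parse_obj with only the fields marked required in the schema above; the ids are placeholders:

from llama_cloud.types.playground_session import PlaygroundSession

session = PlaygroundSession.parse_obj(
    {
        "id": "session-1",
        "pipeline_id": "pipeline-1",
        "user_id": "user-1",
        "llm_params_id": "llm-params-1",
        "retrieval_params_id": "retrieval-params-1",
    }
)
print(session.chat_messages)  # None unless the API returned chat history
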