llama-cloud 0.0.12__py3-none-any.whl → 0.0.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of llama-cloud was flagged as potentially problematic by the registry scanner.

Files changed (56)
  1. llama_cloud/__init__.py +76 -14
  2. llama_cloud/resources/__init__.py +14 -0
  3. llama_cloud/resources/data_sources/types/data_source_update_component_one.py +2 -0
  4. llama_cloud/resources/evals/client.py +5 -5
  5. llama_cloud/resources/parsing/client.py +8 -0
  6. llama_cloud/resources/pipelines/__init__.py +14 -0
  7. llama_cloud/resources/pipelines/client.py +115 -66
  8. llama_cloud/resources/pipelines/types/__init__.py +16 -0
  9. llama_cloud/resources/pipelines/types/pipeline_update_embedding_config.py +78 -0
  10. llama_cloud/types/__init__.py +68 -14
  11. llama_cloud/types/{embedding_config.py → azure_open_ai_embedding_config.py} +4 -6
  12. llama_cloud/types/bedrock_embedding_config.py +34 -0
  13. llama_cloud/types/box_auth_mechanism.py +21 -0
  14. llama_cloud/types/chat_data.py +1 -1
  15. llama_cloud/types/chat_message.py +14 -4
  16. llama_cloud/types/cloud_azure_ai_search_vector_store.py +3 -0
  17. llama_cloud/types/cloud_box_data_source.py +51 -0
  18. llama_cloud/types/cloud_document.py +3 -0
  19. llama_cloud/types/cloud_document_create.py +3 -0
  20. llama_cloud/types/cloud_sharepoint_data_source.py +2 -1
  21. llama_cloud/types/cohere_embedding_config.py +34 -0
  22. llama_cloud/types/configurable_data_source_names.py +4 -0
  23. llama_cloud/types/custom_claims.py +0 -3
  24. llama_cloud/types/data_source_component_one.py +2 -0
  25. llama_cloud/types/data_source_create_component_one.py +2 -0
  26. llama_cloud/types/eval_execution_params.py +2 -2
  27. llama_cloud/types/eval_execution_params_override.py +2 -2
  28. llama_cloud/types/filter_operator.py +4 -0
  29. llama_cloud/types/gemini_embedding_config.py +34 -0
  30. llama_cloud/types/hugging_face_inference_api_embedding_config.py +34 -0
  31. llama_cloud/types/input_message.py +42 -0
  32. llama_cloud/types/llama_parse_parameters.py +4 -1
  33. llama_cloud/types/{eval_llm_model_data.py → llm_model_data.py} +1 -1
  34. llama_cloud/types/llm_parameters.py +2 -2
  35. llama_cloud/types/{supported_eval_llm_model.py → message_annotation.py} +6 -6
  36. llama_cloud/types/metadata_filter.py +1 -1
  37. llama_cloud/types/open_ai_embedding_config.py +34 -0
  38. llama_cloud/types/page_segmentation_config.py +2 -0
  39. llama_cloud/types/parsing_usage.py +1 -1
  40. llama_cloud/types/pipeline.py +11 -1
  41. llama_cloud/types/pipeline_create.py +3 -3
  42. llama_cloud/types/pipeline_create_embedding_config.py +78 -0
  43. llama_cloud/types/pipeline_data_source_component_one.py +2 -0
  44. llama_cloud/types/pipeline_embedding_config.py +78 -0
  45. llama_cloud/types/pipeline_transform_config.py +31 -0
  46. llama_cloud/types/playground_session.py +51 -0
  47. llama_cloud/types/supported_llm_model.py +41 -0
  48. llama_cloud/types/supported_llm_model_names.py +41 -0
  49. {llama_cloud-0.0.12.dist-info → llama_cloud-0.0.14.dist-info}/METADATA +1 -1
  50. {llama_cloud-0.0.12.dist-info → llama_cloud-0.0.14.dist-info}/RECORD +52 -41
  51. llama_cloud/types/embedding_config_component.py +0 -7
  52. llama_cloud/types/embedding_config_component_one.py +0 -19
  53. llama_cloud/types/embedding_config_type.py +0 -41
  54. llama_cloud/types/supported_eval_llm_model_names.py +0 -29
  55. {llama_cloud-0.0.12.dist-info → llama_cloud-0.0.14.dist-info}/LICENSE +0 -0
  56. {llama_cloud-0.0.12.dist-info → llama_cloud-0.0.14.dist-info}/WHEEL +0 -0
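Three type modules are renamed in this release: embedding_config.py → azure_open_ai_embedding_config.py (item 11), eval_llm_model_data.py → llm_model_data.py (item 33), and supported_eval_llm_model.py → message_annotation.py (item 35). As a rough migration sketch against 0.0.14 (module paths are taken from the list above; whether the top-level llama_cloud package re-exports these names is not visible in this diff):

# Sketch: imports against llama-cloud 0.0.14 after the renames listed above.
# Module paths come from the file list; top-level re-exports are not shown here.
from llama_cloud.types.azure_open_ai_embedding_config import AzureOpenAiEmbeddingConfig  # was types/embedding_config.py
from llama_cloud.types.llm_model_data import LlmModelData  # was types/eval_llm_model_data.py
from llama_cloud.types.message_annotation import MessageAnnotation  # replaces types/supported_eval_llm_model.py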
llama_cloud/types/gemini_embedding_config.py
@@ -0,0 +1,34 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .gemini_embedding import GeminiEmbedding
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class GeminiEmbeddingConfig(pydantic.BaseModel):
+    component: typing.Optional[GeminiEmbedding] = pydantic.Field(
+        description="Configuration for the Gemini embedding model."
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
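For illustration, a minimal sketch of using one of the new provider-specific embedding config models. Since component is optional, a bare instance is valid; the GeminiEmbedding fields themselves are defined elsewhere in the package and are not part of this diff:

# Sketch: the new per-provider embedding config models all share this shape.
from llama_cloud.types.gemini_embedding_config import GeminiEmbeddingConfig

config = GeminiEmbeddingConfig()  # component is Optional and defaults to None
print(config.json())              # json()/dict() apply by_alias and exclude_unset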
llama_cloud/types/hugging_face_inference_api_embedding_config.py
@@ -0,0 +1,34 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .hugging_face_inference_api_embedding import HuggingFaceInferenceApiEmbedding
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class HuggingFaceInferenceApiEmbeddingConfig(pydantic.BaseModel):
+    component: typing.Optional[HuggingFaceInferenceApiEmbedding] = pydantic.Field(
+        description="Configuration for the HuggingFace Inference API embedding model."
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/input_message.py
@@ -0,0 +1,42 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .message_role import MessageRole
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class InputMessage(pydantic.BaseModel):
+    """
+    This is distinct from a ChatMessage because this schema is enforced by the AI Chat library used in the frontend
+    """
+
+    id: typing.Optional[str] = pydantic.Field(description="ID of the message, if any. Not necessarily a UUID.")
+    role: MessageRole
+    content: str
+    data: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field(
+        description="Additional data to be stored with the message."
+    )
+    class_name: typing.Optional[str]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
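A hedged construction sketch for the new InputMessage. MessageRole is imported from llama_cloud.types.message_role; the USER member used below is an assumption, since the enum's members are not shown in this diff:

# Sketch: building an InputMessage for the playground chat frontend.
from llama_cloud.types.input_message import InputMessage
from llama_cloud.types.message_role import MessageRole

msg = InputMessage(
    id="msg-1",                       # optional; not necessarily a UUID
    role=MessageRole.USER,            # assumed enum member, not shown in this diff
    content="What changed in 0.0.14?",
    data={"source": "playground"},    # optional free-form payload
)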
llama_cloud/types/llama_parse_parameters.py
@@ -17,7 +17,7 @@ except ImportError:
 
 class LlamaParseParameters(pydantic.BaseModel):
     """
-    Settings that can be configured for how to use LlamaParse to parse files witin a LlamaCloud pipeline.
+    Settings that can be configured for how to use LlamaParse to parse files within a LlamaCloud pipeline.
     """
 
     languages: typing.Optional[typing.List[ParserLanguages]]
@@ -38,6 +38,9 @@ class LlamaParseParameters(pydantic.BaseModel):
     vendor_multimodal_api_key: typing.Optional[str]
     page_prefix: typing.Optional[str]
     page_suffix: typing.Optional[str]
+    take_screenshot: typing.Optional[bool]
+    s_3_input_path: typing.Optional[str] = pydantic.Field(alias="s3_input_path")
+    s_3_output_path_prefix: typing.Optional[str] = pydantic.Field(alias="s3_output_path_prefix")
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
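A sketch of the three new parsing fields. The s3_* fields are declared with aliases, and these generated models are populated and serialized by alias, so the snake_case wire names are used below; the S3 URIs are placeholders:

# Sketch: new LlamaParse parameters added in 0.0.14.
from llama_cloud.types.llama_parse_parameters import LlamaParseParameters

params = LlamaParseParameters(
    take_screenshot=True,
    s3_input_path="s3://my-bucket/input/",           # populated via alias (placeholder URI)
    s3_output_path_prefix="s3://my-bucket/output/",  # populated via alias (placeholder URI)
)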
llama_cloud/types/{eval_llm_model_data.py → llm_model_data.py}
@@ -14,7 +14,7 @@ except ImportError:
     import pydantic  # type: ignore
 
 
-class EvalLlmModelData(pydantic.BaseModel):
+class LlmModelData(pydantic.BaseModel):
     """
     Schema for an eval LLM model.
     """
llama_cloud/types/llm_parameters.py
@@ -20,8 +20,8 @@ class LlmParameters(pydantic.BaseModel):
     Comes with special serialization logic for types used commonly in platform codebase.
     """
 
-    model_name: typing.Optional[str] = pydantic.Field(description="The name of the model to use for retrieval.")
-    system_prompt: typing.Optional[str] = pydantic.Field(description="The system prompt to use for the model.")
+    model_name: typing.Optional[str] = pydantic.Field(description="The name of the model to use for LLM completions.")
+    system_prompt: typing.Optional[str] = pydantic.Field(description="The system prompt to use for the completion.")
     temperature: typing.Optional[float] = pydantic.Field(description="The temperature value for the model.")
     class_name: typing.Optional[str]
 
llama_cloud/types/{supported_eval_llm_model.py → message_annotation.py}
@@ -4,8 +4,6 @@
 import typing
 
 from ..core.datetime_utils import serialize_datetime
-from .eval_llm_model_data import EvalLlmModelData
-from .supported_eval_llm_model_names import SupportedEvalLlmModelNames
 
 try:
     import pydantic
@@ -16,13 +14,15 @@ except ImportError:
     import pydantic  # type: ignore
 
 
-class SupportedEvalLlmModel(pydantic.BaseModel):
+class MessageAnnotation(pydantic.BaseModel):
     """
-    Response Schema for a supported eval LLM model.
+    Base schema model for BaseComponent classes used in the platform.
+    Comes with special serialization logic for types used commonly in platform codebase.
     """
 
-    name: SupportedEvalLlmModelNames = pydantic.Field(description="The name of the supported eval LLM model.")
-    details: EvalLlmModelData = pydantic.Field(description="The details of the supported eval LLM model.")
+    type: str
+    data: typing.Optional[typing.Any]
+    class_name: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
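MessageAnnotation is now a generic (type, data) envelope rather than an eval-model schema. A small illustrative sketch; the field values below are made up:

# Sketch: the new MessageAnnotation envelope (illustrative values only).
from llama_cloud.types.message_annotation import MessageAnnotation

annotation = MessageAnnotation(type="citation", data={"node_id": "abc123"})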
llama_cloud/types/metadata_filter.py
@@ -27,7 +27,7 @@ class MetadataFilter(pydantic.BaseModel):
     """
 
     key: str
-    value: MetadataFilterValue
+    value: typing.Optional[MetadataFilterValue]
     operator: typing.Optional[FilterOperator]
 
    def json(self, **kwargs: typing.Any) -> str:
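Since value is now optional, filters can be built without one. A sketch; FilterOperator.EQ is assumed to exist, and the four operator members this release adds to FilterOperator are not shown here:

# Sketch: MetadataFilter with and without a value.
from llama_cloud.types.metadata_filter import MetadataFilter
from llama_cloud.types.filter_operator import FilterOperator

with_value = MetadataFilter(key="author", value="jane", operator=FilterOperator.EQ)  # EQ assumed
without_value = MetadataFilter(key="reviewed")  # value and operator may now be omitted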
llama_cloud/types/open_ai_embedding_config.py
@@ -0,0 +1,34 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .open_ai_embedding import OpenAiEmbedding
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class OpenAiEmbeddingConfig(pydantic.BaseModel):
+    component: typing.Optional[OpenAiEmbedding] = pydantic.Field(
+        description="Configuration for the OpenAI embedding model."
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/page_segmentation_config.py
@@ -15,6 +15,8 @@ except ImportError:
 
 
 class PageSegmentationConfig(pydantic.BaseModel):
+    page_separator: typing.Optional[str]
+
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
         return super().json(**kwargs_with_defaults)
llama_cloud/types/parsing_usage.py
@@ -16,7 +16,7 @@ except ImportError:
 
 class ParsingUsage(pydantic.BaseModel):
     usage_pdf_pages: int
-    max_pdf_pages: int
+    max_pdf_pages: typing.Optional[int]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
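Because max_pdf_pages can now be None, quota arithmetic on the client side needs a guard. A small sketch:

# Sketch: guarding against the now-optional max_pdf_pages.
from llama_cloud.types.parsing_usage import ParsingUsage

usage = ParsingUsage(usage_pdf_pages=120, max_pdf_pages=None)
remaining = None if usage.max_pdf_pages is None else usage.max_pdf_pages - usage.usage_pdf_pages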
llama_cloud/types/pipeline.py
@@ -8,6 +8,8 @@ from .configured_transformation_item import ConfiguredTransformationItem
 from .data_sink import DataSink
 from .eval_execution_params import EvalExecutionParams
 from .llama_parse_parameters import LlamaParseParameters
+from .pipeline_embedding_config import PipelineEmbeddingConfig
+from .pipeline_transform_config import PipelineTransformConfig
 from .pipeline_type import PipelineType
 from .preset_retrieval_params import PresetRetrievalParams
 
@@ -25,7 +27,6 @@ class Pipeline(pydantic.BaseModel):
     Schema for a pipeline.
     """
 
-    configured_transformations: typing.List[ConfiguredTransformationItem]
     id: str = pydantic.Field(description="Unique identifier")
     created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
     updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
@@ -37,6 +38,15 @@ class Pipeline(pydantic.BaseModel):
     managed_pipeline_id: typing.Optional[str] = pydantic.Field(
         description="The ID of the ManagedPipeline this playground pipeline is linked to."
     )
+    embedding_config: typing.Optional[PipelineEmbeddingConfig] = pydantic.Field(
+        description="Configuration for the embedding model."
+    )
+    configured_transformations: typing.Optional[typing.List[ConfiguredTransformationItem]] = pydantic.Field(
+        description="Deprecated don't use it, List of configured transformations."
+    )
+    transform_config: typing.Optional[PipelineTransformConfig] = pydantic.Field(
+        description="Configuration for the transformation."
+    )
     preset_retrieval_parameters: typing.Optional[PresetRetrievalParams] = pydantic.Field(
         description="Preset retrieval parameters for the pipeline."
     )
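A sketch of reading the new optional Pipeline fields; how the Pipeline instance is obtained is outside this diff, so the function below takes one as an argument:

# Sketch: inspecting the fields added to Pipeline in 0.0.14.
def describe(pipeline) -> None:
    if pipeline.embedding_config is not None:
        print("embedding:", pipeline.embedding_config.type)   # union members carry a `type` literal
    if pipeline.transform_config is not None:
        print("transform mode:", pipeline.transform_config.mode)
    if pipeline.configured_transformations:  # deprecated; prefer the two fields above
        print("legacy transformations:", len(pipeline.configured_transformations))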
llama_cloud/types/pipeline_create.py
@@ -6,9 +6,9 @@ import typing
 from ..core.datetime_utils import serialize_datetime
 from .configured_transformation_item import ConfiguredTransformationItem
 from .data_sink_create import DataSinkCreate
-from .embedding_config import EmbeddingConfig
 from .eval_execution_params import EvalExecutionParams
 from .llama_parse_parameters import LlamaParseParameters
+from .pipeline_create_embedding_config import PipelineCreateEmbeddingConfig
 from .pipeline_create_transform_config import PipelineCreateTransformConfig
 from .pipeline_type import PipelineType
 from .preset_retrieval_params import PresetRetrievalParams
@@ -27,14 +27,14 @@ class PipelineCreate(pydantic.BaseModel):
     Schema for creating a pipeline.
     """
 
-    embedding_config: typing.Optional[EmbeddingConfig] = pydantic.Field(
+    embedding_config: typing.Optional[PipelineCreateEmbeddingConfig] = pydantic.Field(
         description="Configuration for the embedding model."
     )
     transform_config: typing.Optional[PipelineCreateTransformConfig] = pydantic.Field(
         description="Configuration for the transformation."
     )
     configured_transformations: typing.Optional[typing.List[ConfiguredTransformationItem]] = pydantic.Field(
-        description="List of configured transformations."
+        description="Deprecated, use embedding_config or transform_config instead. configured transformations for the pipeline."
     )
     data_sink_id: typing.Optional[str] = pydantic.Field(
         description="Data sink ID. When provided instead of data_sink, the data sink will be looked up by ID."
llama_cloud/types/pipeline_create_embedding_config.py
@@ -0,0 +1,78 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import typing_extensions
+
+from .azure_open_ai_embedding_config import AzureOpenAiEmbeddingConfig
+from .bedrock_embedding_config import BedrockEmbeddingConfig
+from .cohere_embedding_config import CohereEmbeddingConfig
+from .gemini_embedding_config import GeminiEmbeddingConfig
+from .hugging_face_inference_api_embedding_config import HuggingFaceInferenceApiEmbeddingConfig
+from .open_ai_embedding_config import OpenAiEmbeddingConfig
+
+
+class PipelineCreateEmbeddingConfig_OpenaiEmbedding(OpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["OPENAI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineCreateEmbeddingConfig_AzureEmbedding(AzureOpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["AZURE_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineCreateEmbeddingConfig_HuggingfaceApiEmbedding(HuggingFaceInferenceApiEmbeddingConfig):
+    type: typing_extensions.Literal["HUGGINGFACE_API_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineCreateEmbeddingConfig_BedrockEmbedding(BedrockEmbeddingConfig):
+    type: typing_extensions.Literal["BEDROCK_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineCreateEmbeddingConfig_GeminiEmbedding(GeminiEmbeddingConfig):
+    type: typing_extensions.Literal["GEMINI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineCreateEmbeddingConfig_CohereEmbedding(CohereEmbeddingConfig):
+    type: typing_extensions.Literal["COHERE_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+PipelineCreateEmbeddingConfig = typing.Union[
+    PipelineCreateEmbeddingConfig_OpenaiEmbedding,
+    PipelineCreateEmbeddingConfig_AzureEmbedding,
+    PipelineCreateEmbeddingConfig_HuggingfaceApiEmbedding,
+    PipelineCreateEmbeddingConfig_BedrockEmbedding,
+    PipelineCreateEmbeddingConfig_GeminiEmbedding,
+    PipelineCreateEmbeddingConfig_CohereEmbedding,
+]
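A construction sketch for one member of the new tagged union: type is the discriminant and component remains optional, so the bare form below is valid. The resulting value is what PipelineCreate.embedding_config now accepts:

# Sketch: building a tagged embedding-config union member.
from llama_cloud.types.pipeline_create_embedding_config import (
    PipelineCreateEmbeddingConfig_OpenaiEmbedding,
)

embedding_config = PipelineCreateEmbeddingConfig_OpenaiEmbedding(type="OPENAI_EMBEDDING")
# Pass this as PipelineCreate(embedding_config=embedding_config, ...).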
llama_cloud/types/pipeline_data_source_component_one.py
@@ -3,6 +3,7 @@
 import typing
 
 from .cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
+from .cloud_box_data_source import CloudBoxDataSource
 from .cloud_confluence_data_source import CloudConfluenceDataSource
 from .cloud_jira_data_source import CloudJiraDataSource
 from .cloud_notion_page_data_source import CloudNotionPageDataSource
@@ -20,4 +21,5 @@ PipelineDataSourceComponentOne = typing.Union[
    CloudNotionPageDataSource,
    CloudConfluenceDataSource,
    CloudJiraDataSource,
+    CloudBoxDataSource,
 ]
llama_cloud/types/pipeline_embedding_config.py
@@ -0,0 +1,78 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import typing_extensions
+
+from .azure_open_ai_embedding_config import AzureOpenAiEmbeddingConfig
+from .bedrock_embedding_config import BedrockEmbeddingConfig
+from .cohere_embedding_config import CohereEmbeddingConfig
+from .gemini_embedding_config import GeminiEmbeddingConfig
+from .hugging_face_inference_api_embedding_config import HuggingFaceInferenceApiEmbeddingConfig
+from .open_ai_embedding_config import OpenAiEmbeddingConfig
+
+
+class PipelineEmbeddingConfig_OpenaiEmbedding(OpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["OPENAI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineEmbeddingConfig_AzureEmbedding(AzureOpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["AZURE_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineEmbeddingConfig_HuggingfaceApiEmbedding(HuggingFaceInferenceApiEmbeddingConfig):
+    type: typing_extensions.Literal["HUGGINGFACE_API_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineEmbeddingConfig_BedrockEmbedding(BedrockEmbeddingConfig):
+    type: typing_extensions.Literal["BEDROCK_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineEmbeddingConfig_GeminiEmbedding(GeminiEmbeddingConfig):
+    type: typing_extensions.Literal["GEMINI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineEmbeddingConfig_CohereEmbedding(CohereEmbeddingConfig):
+    type: typing_extensions.Literal["COHERE_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+PipelineEmbeddingConfig = typing.Union[
+    PipelineEmbeddingConfig_OpenaiEmbedding,
+    PipelineEmbeddingConfig_AzureEmbedding,
+    PipelineEmbeddingConfig_HuggingfaceApiEmbedding,
+    PipelineEmbeddingConfig_BedrockEmbedding,
+    PipelineEmbeddingConfig_GeminiEmbedding,
+    PipelineEmbeddingConfig_CohereEmbedding,
+]
llama_cloud/types/pipeline_transform_config.py
@@ -0,0 +1,31 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import typing_extensions
+
+from .advanced_mode_transform_config import AdvancedModeTransformConfig
+from .auto_transform_config import AutoTransformConfig
+
+
+class PipelineTransformConfig_Auto(AutoTransformConfig):
+    mode: typing_extensions.Literal["auto"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class PipelineTransformConfig_Advanced(AdvancedModeTransformConfig):
+    mode: typing_extensions.Literal["advanced"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+PipelineTransformConfig = typing.Union[PipelineTransformConfig_Auto, PipelineTransformConfig_Advanced]
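A sketch of the two transform modes, assuming AutoTransformConfig and AdvancedModeTransformConfig have no required fields of their own (their definitions are not part of this diff):

# Sketch: selecting a transform mode via the tagged union.
from llama_cloud.types.pipeline_transform_config import (
    PipelineTransformConfig_Advanced,
    PipelineTransformConfig_Auto,
)

auto = PipelineTransformConfig_Auto(mode="auto")            # parent fields assumed optional
advanced = PipelineTransformConfig_Advanced(mode="advanced")  # parent fields assumed optional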
llama_cloud/types/playground_session.py
@@ -0,0 +1,51 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .chat_message import ChatMessage
+from .llm_parameters import LlmParameters
+from .preset_retrieval_params import PresetRetrievalParams
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class PlaygroundSession(pydantic.BaseModel):
+    """
+    A playground session for a user.
+    """
+
+    id: str = pydantic.Field(description="Unique identifier")
+    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
+    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
+    pipeline_id: str
+    user_id: str
+    llm_params_id: str
+    llm_params: typing.Optional[LlmParameters] = pydantic.Field(description="LLM parameters last used in this session.")
+    retrieval_params_id: str
+    retrieval_params: typing.Optional[PresetRetrievalParams] = pydantic.Field(
+        description="Preset retrieval parameters last used in this session."
+    )
+    chat_messages: typing.Optional[typing.List[ChatMessage]] = pydantic.Field(
+        description="Chat message history for this session."
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/supported_llm_model.py
@@ -0,0 +1,41 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .llm_model_data import LlmModelData
+from .supported_llm_model_names import SupportedLlmModelNames
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class SupportedLlmModel(pydantic.BaseModel):
+    """
+    Response Schema for a supported eval LLM model.
+    """
+
+    name: SupportedLlmModelNames = pydantic.Field(description="The name of the supported LLM model.")
+    enabled: typing.Optional[bool] = pydantic.Field(
+        description="Whether the LLM model is enabled for use in LlamaCloud."
+    )
+    details: LlmModelData = pydantic.Field(description="The details of the supported LLM model.")
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/supported_llm_model_names.py
@@ -0,0 +1,41 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class SupportedLlmModelNames(str, enum.Enum):
+    """
+    An enumeration.
+    """
+
+    GPT_3_5_TURBO = "GPT_3_5_TURBO"
+    GPT_4 = "GPT_4"
+    GPT_4_TURBO = "GPT_4_TURBO"
+    GPT_4_O = "GPT_4O"
+    GPT_4_O_MINI = "GPT_4O_MINI"
+    AZURE_OPENAI = "AZURE_OPENAI"
+
+    def visit(
+        self,
+        gpt_3_5_turbo: typing.Callable[[], T_Result],
+        gpt_4: typing.Callable[[], T_Result],
+        gpt_4_turbo: typing.Callable[[], T_Result],
+        gpt_4_o: typing.Callable[[], T_Result],
+        gpt_4_o_mini: typing.Callable[[], T_Result],
+        azure_openai: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is SupportedLlmModelNames.GPT_3_5_TURBO:
+            return gpt_3_5_turbo()
+        if self is SupportedLlmModelNames.GPT_4:
+            return gpt_4()
+        if self is SupportedLlmModelNames.GPT_4_TURBO:
+            return gpt_4_turbo()
+        if self is SupportedLlmModelNames.GPT_4_O:
+            return gpt_4_o()
+        if self is SupportedLlmModelNames.GPT_4_O_MINI:
+            return gpt_4_o_mini()
+        if self is SupportedLlmModelNames.AZURE_OPENAI:
+            return azure_openai()
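The generated visit() method dispatches exhaustively over the enum members; a usage sketch, where the returned labels are illustrative:

# Sketch: exhaustive dispatch over the new enum via visit().
from llama_cloud.types.supported_llm_model_names import SupportedLlmModelNames

model = SupportedLlmModelNames.GPT_4_O
label = model.visit(
    gpt_3_5_turbo=lambda: "gpt-3.5-turbo",
    gpt_4=lambda: "gpt-4",
    gpt_4_turbo=lambda: "gpt-4-turbo",
    gpt_4_o=lambda: "gpt-4o",
    gpt_4_o_mini=lambda: "gpt-4o-mini",
    azure_openai=lambda: "azure-openai",
)
print(label)  # prints "gpt-4o"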
{llama_cloud-0.0.12.dist-info → llama_cloud-0.0.14.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llama-cloud
-Version: 0.0.12
+Version: 0.0.14
 Summary: 
 Author: Logan Markewich
 Author-email: logan@runllama.ai