llama_cloud-0.0.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of llama-cloud might be problematic.
- llama_cloud/__init__.py +295 -0
- llama_cloud/client.py +72 -0
- llama_cloud/core/__init__.py +17 -0
- llama_cloud/core/api_error.py +15 -0
- llama_cloud/core/client_wrapper.py +51 -0
- llama_cloud/core/datetime_utils.py +28 -0
- llama_cloud/core/jsonable_encoder.py +103 -0
- llama_cloud/core/remove_none_from_dict.py +11 -0
- llama_cloud/errors/__init__.py +5 -0
- llama_cloud/errors/unprocessable_entity_error.py +9 -0
- llama_cloud/resources/__init__.py +40 -0
- llama_cloud/resources/api_keys/__init__.py +2 -0
- llama_cloud/resources/api_keys/client.py +302 -0
- llama_cloud/resources/billing/__init__.py +2 -0
- llama_cloud/resources/billing/client.py +234 -0
- llama_cloud/resources/component_definitions/__init__.py +2 -0
- llama_cloud/resources/component_definitions/client.py +192 -0
- llama_cloud/resources/data_sinks/__init__.py +5 -0
- llama_cloud/resources/data_sinks/client.py +506 -0
- llama_cloud/resources/data_sinks/types/__init__.py +6 -0
- llama_cloud/resources/data_sinks/types/data_sink_update_component.py +7 -0
- llama_cloud/resources/data_sinks/types/data_sink_update_component_one.py +17 -0
- llama_cloud/resources/data_sources/__init__.py +5 -0
- llama_cloud/resources/data_sources/client.py +521 -0
- llama_cloud/resources/data_sources/types/__init__.py +7 -0
- llama_cloud/resources/data_sources/types/data_source_update_component.py +7 -0
- llama_cloud/resources/data_sources/types/data_source_update_component_one.py +19 -0
- llama_cloud/resources/data_sources/types/data_source_update_custom_metadata_value.py +7 -0
- llama_cloud/resources/deprecated/__init__.py +2 -0
- llama_cloud/resources/deprecated/client.py +982 -0
- llama_cloud/resources/evals/__init__.py +2 -0
- llama_cloud/resources/evals/client.py +745 -0
- llama_cloud/resources/files/__init__.py +5 -0
- llama_cloud/resources/files/client.py +560 -0
- llama_cloud/resources/files/types/__init__.py +5 -0
- llama_cloud/resources/files/types/file_create_resource_info_value.py +5 -0
- llama_cloud/resources/parsing/__init__.py +2 -0
- llama_cloud/resources/parsing/client.py +982 -0
- llama_cloud/resources/pipelines/__init__.py +5 -0
- llama_cloud/resources/pipelines/client.py +2599 -0
- llama_cloud/resources/pipelines/types/__init__.py +5 -0
- llama_cloud/resources/pipelines/types/pipeline_file_update_custom_metadata_value.py +7 -0
- llama_cloud/resources/projects/__init__.py +2 -0
- llama_cloud/resources/projects/client.py +1231 -0
- llama_cloud/types/__init__.py +253 -0
- llama_cloud/types/api_key.py +37 -0
- llama_cloud/types/azure_open_ai_embedding.py +75 -0
- llama_cloud/types/base.py +26 -0
- llama_cloud/types/base_prompt_template.py +44 -0
- llama_cloud/types/bedrock_embedding.py +56 -0
- llama_cloud/types/chat_message.py +35 -0
- llama_cloud/types/cloud_az_storage_blob_data_source.py +40 -0
- llama_cloud/types/cloud_chroma_vector_store.py +40 -0
- llama_cloud/types/cloud_document.py +36 -0
- llama_cloud/types/cloud_document_create.py +36 -0
- llama_cloud/types/cloud_gcs_data_source.py +37 -0
- llama_cloud/types/cloud_google_drive_data_source.py +36 -0
- llama_cloud/types/cloud_one_drive_data_source.py +38 -0
- llama_cloud/types/cloud_pinecone_vector_store.py +46 -0
- llama_cloud/types/cloud_postgres_vector_store.py +44 -0
- llama_cloud/types/cloud_qdrant_vector_store.py +48 -0
- llama_cloud/types/cloud_s_3_data_source.py +42 -0
- llama_cloud/types/cloud_sharepoint_data_source.py +38 -0
- llama_cloud/types/cloud_weaviate_vector_store.py +38 -0
- llama_cloud/types/code_splitter.py +46 -0
- llama_cloud/types/cohere_embedding.py +46 -0
- llama_cloud/types/configurable_data_sink_names.py +37 -0
- llama_cloud/types/configurable_data_source_names.py +41 -0
- llama_cloud/types/configurable_transformation_definition.py +45 -0
- llama_cloud/types/configurable_transformation_names.py +73 -0
- llama_cloud/types/configured_transformation_item.py +43 -0
- llama_cloud/types/configured_transformation_item_component.py +9 -0
- llama_cloud/types/configured_transformation_item_component_one.py +35 -0
- llama_cloud/types/data_sink.py +40 -0
- llama_cloud/types/data_sink_component.py +7 -0
- llama_cloud/types/data_sink_component_one.py +17 -0
- llama_cloud/types/data_sink_create.py +36 -0
- llama_cloud/types/data_sink_create_component.py +7 -0
- llama_cloud/types/data_sink_create_component_one.py +17 -0
- llama_cloud/types/data_sink_definition.py +41 -0
- llama_cloud/types/data_source.py +44 -0
- llama_cloud/types/data_source_component.py +7 -0
- llama_cloud/types/data_source_component_one.py +19 -0
- llama_cloud/types/data_source_create.py +40 -0
- llama_cloud/types/data_source_create_component.py +7 -0
- llama_cloud/types/data_source_create_component_one.py +19 -0
- llama_cloud/types/data_source_create_custom_metadata_value.py +7 -0
- llama_cloud/types/data_source_custom_metadata_value.py +7 -0
- llama_cloud/types/data_source_definition.py +41 -0
- llama_cloud/types/eval_dataset.py +37 -0
- llama_cloud/types/eval_dataset_job_params.py +36 -0
- llama_cloud/types/eval_dataset_job_record.py +59 -0
- llama_cloud/types/eval_execution_params.py +38 -0
- llama_cloud/types/eval_execution_params_override.py +38 -0
- llama_cloud/types/eval_llm_model_data.py +33 -0
- llama_cloud/types/eval_question.py +39 -0
- llama_cloud/types/eval_question_create.py +28 -0
- llama_cloud/types/eval_question_result.py +49 -0
- llama_cloud/types/file.py +46 -0
- llama_cloud/types/file_resource_info_value.py +5 -0
- llama_cloud/types/filter_condition.py +21 -0
- llama_cloud/types/filter_operator.py +65 -0
- llama_cloud/types/gemini_embedding.py +51 -0
- llama_cloud/types/html_node_parser.py +44 -0
- llama_cloud/types/http_validation_error.py +29 -0
- llama_cloud/types/hugging_face_inference_api_embedding.py +68 -0
- llama_cloud/types/hugging_face_inference_api_embedding_token.py +5 -0
- llama_cloud/types/json_node_parser.py +43 -0
- llama_cloud/types/llama_parse_supported_file_extensions.py +161 -0
- llama_cloud/types/llm.py +55 -0
- llama_cloud/types/local_eval.py +46 -0
- llama_cloud/types/local_eval_results.py +37 -0
- llama_cloud/types/local_eval_sets.py +30 -0
- llama_cloud/types/managed_ingestion_status.py +37 -0
- llama_cloud/types/markdown_element_node_parser.py +49 -0
- llama_cloud/types/markdown_node_parser.py +43 -0
- llama_cloud/types/message_role.py +45 -0
- llama_cloud/types/metadata_filter.py +41 -0
- llama_cloud/types/metadata_filter_value.py +5 -0
- llama_cloud/types/metadata_filters.py +41 -0
- llama_cloud/types/metadata_filters_filters_item.py +8 -0
- llama_cloud/types/metric_result.py +30 -0
- llama_cloud/types/node_parser.py +37 -0
- llama_cloud/types/object_type.py +33 -0
- llama_cloud/types/open_ai_embedding.py +73 -0
- llama_cloud/types/parser_languages.py +361 -0
- llama_cloud/types/parsing_history_item.py +36 -0
- llama_cloud/types/parsing_job.py +30 -0
- llama_cloud/types/parsing_job_json_result.py +29 -0
- llama_cloud/types/parsing_job_markdown_result.py +29 -0
- llama_cloud/types/parsing_job_text_result.py +29 -0
- llama_cloud/types/parsing_usage.py +29 -0
- llama_cloud/types/pipeline.py +64 -0
- llama_cloud/types/pipeline_create.py +61 -0
- llama_cloud/types/pipeline_data_source.py +46 -0
- llama_cloud/types/pipeline_data_source_component.py +7 -0
- llama_cloud/types/pipeline_data_source_component_one.py +19 -0
- llama_cloud/types/pipeline_data_source_create.py +32 -0
- llama_cloud/types/pipeline_data_source_custom_metadata_value.py +7 -0
- llama_cloud/types/pipeline_deployment.py +38 -0
- llama_cloud/types/pipeline_file.py +52 -0
- llama_cloud/types/pipeline_file_create.py +36 -0
- llama_cloud/types/pipeline_file_create_custom_metadata_value.py +7 -0
- llama_cloud/types/pipeline_file_custom_metadata_value.py +7 -0
- llama_cloud/types/pipeline_file_resource_info_value.py +7 -0
- llama_cloud/types/pipeline_file_status_response.py +35 -0
- llama_cloud/types/pipeline_type.py +21 -0
- llama_cloud/types/pooling.py +29 -0
- llama_cloud/types/preset_retrieval_params.py +40 -0
- llama_cloud/types/presigned_url.py +36 -0
- llama_cloud/types/project.py +42 -0
- llama_cloud/types/project_create.py +32 -0
- llama_cloud/types/prompt_mixin_prompts.py +36 -0
- llama_cloud/types/prompt_spec.py +35 -0
- llama_cloud/types/pydantic_program_mode.py +41 -0
- llama_cloud/types/related_node_info.py +37 -0
- llama_cloud/types/retrieve_results.py +40 -0
- llama_cloud/types/sentence_splitter.py +48 -0
- llama_cloud/types/simple_file_node_parser.py +44 -0
- llama_cloud/types/status_enum.py +33 -0
- llama_cloud/types/supported_eval_llm_model.py +35 -0
- llama_cloud/types/supported_eval_llm_model_names.py +29 -0
- llama_cloud/types/text_node.py +62 -0
- llama_cloud/types/text_node_relationships_value.py +7 -0
- llama_cloud/types/text_node_with_score.py +36 -0
- llama_cloud/types/token_text_splitter.py +43 -0
- llama_cloud/types/transformation_category_names.py +21 -0
- llama_cloud/types/validation_error.py +31 -0
- llama_cloud/types/validation_error_loc_item.py +5 -0
- llama_cloud-0.0.1.dist-info/LICENSE +21 -0
- llama_cloud-0.0.1.dist-info/METADATA +25 -0
- llama_cloud-0.0.1.dist-info/RECORD +173 -0
- llama_cloud-0.0.1.dist-info/WHEEL +4 -0
llama_cloud/types/__init__.py
@@ -0,0 +1,253 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from .api_key import ApiKey
+from .azure_open_ai_embedding import AzureOpenAiEmbedding
+from .base import Base
+from .base_prompt_template import BasePromptTemplate
+from .bedrock_embedding import BedrockEmbedding
+from .chat_message import ChatMessage
+from .cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
+from .cloud_chroma_vector_store import CloudChromaVectorStore
+from .cloud_document import CloudDocument
+from .cloud_document_create import CloudDocumentCreate
+from .cloud_gcs_data_source import CloudGcsDataSource
+from .cloud_google_drive_data_source import CloudGoogleDriveDataSource
+from .cloud_one_drive_data_source import CloudOneDriveDataSource
+from .cloud_pinecone_vector_store import CloudPineconeVectorStore
+from .cloud_postgres_vector_store import CloudPostgresVectorStore
+from .cloud_qdrant_vector_store import CloudQdrantVectorStore
+from .cloud_s_3_data_source import CloudS3DataSource
+from .cloud_sharepoint_data_source import CloudSharepointDataSource
+from .cloud_weaviate_vector_store import CloudWeaviateVectorStore
+from .code_splitter import CodeSplitter
+from .cohere_embedding import CohereEmbedding
+from .configurable_data_sink_names import ConfigurableDataSinkNames
+from .configurable_data_source_names import ConfigurableDataSourceNames
+from .configurable_transformation_definition import ConfigurableTransformationDefinition
+from .configurable_transformation_names import ConfigurableTransformationNames
+from .configured_transformation_item import ConfiguredTransformationItem
+from .configured_transformation_item_component import ConfiguredTransformationItemComponent
+from .configured_transformation_item_component_one import ConfiguredTransformationItemComponentOne
+from .data_sink import DataSink
+from .data_sink_component import DataSinkComponent
+from .data_sink_component_one import DataSinkComponentOne
+from .data_sink_create import DataSinkCreate
+from .data_sink_create_component import DataSinkCreateComponent
+from .data_sink_create_component_one import DataSinkCreateComponentOne
+from .data_sink_definition import DataSinkDefinition
+from .data_source import DataSource
+from .data_source_component import DataSourceComponent
+from .data_source_component_one import DataSourceComponentOne
+from .data_source_create import DataSourceCreate
+from .data_source_create_component import DataSourceCreateComponent
+from .data_source_create_component_one import DataSourceCreateComponentOne
+from .data_source_create_custom_metadata_value import DataSourceCreateCustomMetadataValue
+from .data_source_custom_metadata_value import DataSourceCustomMetadataValue
+from .data_source_definition import DataSourceDefinition
+from .eval_dataset import EvalDataset
+from .eval_dataset_job_params import EvalDatasetJobParams
+from .eval_dataset_job_record import EvalDatasetJobRecord
+from .eval_execution_params import EvalExecutionParams
+from .eval_execution_params_override import EvalExecutionParamsOverride
+from .eval_llm_model_data import EvalLlmModelData
+from .eval_question import EvalQuestion
+from .eval_question_create import EvalQuestionCreate
+from .eval_question_result import EvalQuestionResult
+from .file import File
+from .file_resource_info_value import FileResourceInfoValue
+from .filter_condition import FilterCondition
+from .filter_operator import FilterOperator
+from .gemini_embedding import GeminiEmbedding
+from .html_node_parser import HtmlNodeParser
+from .http_validation_error import HttpValidationError
+from .hugging_face_inference_api_embedding import HuggingFaceInferenceApiEmbedding
+from .hugging_face_inference_api_embedding_token import HuggingFaceInferenceApiEmbeddingToken
+from .json_node_parser import JsonNodeParser
+from .llama_parse_supported_file_extensions import LlamaParseSupportedFileExtensions
+from .llm import Llm
+from .local_eval import LocalEval
+from .local_eval_results import LocalEvalResults
+from .local_eval_sets import LocalEvalSets
+from .managed_ingestion_status import ManagedIngestionStatus
+from .markdown_element_node_parser import MarkdownElementNodeParser
+from .markdown_node_parser import MarkdownNodeParser
+from .message_role import MessageRole
+from .metadata_filter import MetadataFilter
+from .metadata_filter_value import MetadataFilterValue
+from .metadata_filters import MetadataFilters
+from .metadata_filters_filters_item import MetadataFiltersFiltersItem
+from .metric_result import MetricResult
+from .node_parser import NodeParser
+from .object_type import ObjectType
+from .open_ai_embedding import OpenAiEmbedding
+from .parser_languages import ParserLanguages
+from .parsing_history_item import ParsingHistoryItem
+from .parsing_job import ParsingJob
+from .parsing_job_json_result import ParsingJobJsonResult
+from .parsing_job_markdown_result import ParsingJobMarkdownResult
+from .parsing_job_text_result import ParsingJobTextResult
+from .parsing_usage import ParsingUsage
+from .pipeline import Pipeline
+from .pipeline_create import PipelineCreate
+from .pipeline_data_source import PipelineDataSource
+from .pipeline_data_source_component import PipelineDataSourceComponent
+from .pipeline_data_source_component_one import PipelineDataSourceComponentOne
+from .pipeline_data_source_create import PipelineDataSourceCreate
+from .pipeline_data_source_custom_metadata_value import PipelineDataSourceCustomMetadataValue
+from .pipeline_deployment import PipelineDeployment
+from .pipeline_file import PipelineFile
+from .pipeline_file_create import PipelineFileCreate
+from .pipeline_file_create_custom_metadata_value import PipelineFileCreateCustomMetadataValue
+from .pipeline_file_custom_metadata_value import PipelineFileCustomMetadataValue
+from .pipeline_file_resource_info_value import PipelineFileResourceInfoValue
+from .pipeline_file_status_response import PipelineFileStatusResponse
+from .pipeline_type import PipelineType
+from .pooling import Pooling
+from .preset_retrieval_params import PresetRetrievalParams
+from .presigned_url import PresignedUrl
+from .project import Project
+from .project_create import ProjectCreate
+from .prompt_mixin_prompts import PromptMixinPrompts
+from .prompt_spec import PromptSpec
+from .pydantic_program_mode import PydanticProgramMode
+from .related_node_info import RelatedNodeInfo
+from .retrieve_results import RetrieveResults
+from .sentence_splitter import SentenceSplitter
+from .simple_file_node_parser import SimpleFileNodeParser
+from .status_enum import StatusEnum
+from .supported_eval_llm_model import SupportedEvalLlmModel
+from .supported_eval_llm_model_names import SupportedEvalLlmModelNames
+from .text_node import TextNode
+from .text_node_relationships_value import TextNodeRelationshipsValue
+from .text_node_with_score import TextNodeWithScore
+from .token_text_splitter import TokenTextSplitter
+from .transformation_category_names import TransformationCategoryNames
+from .validation_error import ValidationError
+from .validation_error_loc_item import ValidationErrorLocItem
+
+__all__ = [
+    "ApiKey",
+    "AzureOpenAiEmbedding",
+    "Base",
+    "BasePromptTemplate",
+    "BedrockEmbedding",
+    "ChatMessage",
+    "CloudAzStorageBlobDataSource",
+    "CloudChromaVectorStore",
+    "CloudDocument",
+    "CloudDocumentCreate",
+    "CloudGcsDataSource",
+    "CloudGoogleDriveDataSource",
+    "CloudOneDriveDataSource",
+    "CloudPineconeVectorStore",
+    "CloudPostgresVectorStore",
+    "CloudQdrantVectorStore",
+    "CloudS3DataSource",
+    "CloudSharepointDataSource",
+    "CloudWeaviateVectorStore",
+    "CodeSplitter",
+    "CohereEmbedding",
+    "ConfigurableDataSinkNames",
+    "ConfigurableDataSourceNames",
+    "ConfigurableTransformationDefinition",
+    "ConfigurableTransformationNames",
+    "ConfiguredTransformationItem",
+    "ConfiguredTransformationItemComponent",
+    "ConfiguredTransformationItemComponentOne",
+    "DataSink",
+    "DataSinkComponent",
+    "DataSinkComponentOne",
+    "DataSinkCreate",
+    "DataSinkCreateComponent",
+    "DataSinkCreateComponentOne",
+    "DataSinkDefinition",
+    "DataSource",
+    "DataSourceComponent",
+    "DataSourceComponentOne",
+    "DataSourceCreate",
+    "DataSourceCreateComponent",
+    "DataSourceCreateComponentOne",
+    "DataSourceCreateCustomMetadataValue",
+    "DataSourceCustomMetadataValue",
+    "DataSourceDefinition",
+    "EvalDataset",
+    "EvalDatasetJobParams",
+    "EvalDatasetJobRecord",
+    "EvalExecutionParams",
+    "EvalExecutionParamsOverride",
+    "EvalLlmModelData",
+    "EvalQuestion",
+    "EvalQuestionCreate",
+    "EvalQuestionResult",
+    "File",
+    "FileResourceInfoValue",
+    "FilterCondition",
+    "FilterOperator",
+    "GeminiEmbedding",
+    "HtmlNodeParser",
+    "HttpValidationError",
+    "HuggingFaceInferenceApiEmbedding",
+    "HuggingFaceInferenceApiEmbeddingToken",
+    "JsonNodeParser",
+    "LlamaParseSupportedFileExtensions",
+    "Llm",
+    "LocalEval",
+    "LocalEvalResults",
+    "LocalEvalSets",
+    "ManagedIngestionStatus",
+    "MarkdownElementNodeParser",
+    "MarkdownNodeParser",
+    "MessageRole",
+    "MetadataFilter",
+    "MetadataFilterValue",
+    "MetadataFilters",
+    "MetadataFiltersFiltersItem",
+    "MetricResult",
+    "NodeParser",
+    "ObjectType",
+    "OpenAiEmbedding",
+    "ParserLanguages",
+    "ParsingHistoryItem",
+    "ParsingJob",
+    "ParsingJobJsonResult",
+    "ParsingJobMarkdownResult",
+    "ParsingJobTextResult",
+    "ParsingUsage",
+    "Pipeline",
+    "PipelineCreate",
+    "PipelineDataSource",
+    "PipelineDataSourceComponent",
+    "PipelineDataSourceComponentOne",
+    "PipelineDataSourceCreate",
+    "PipelineDataSourceCustomMetadataValue",
+    "PipelineDeployment",
+    "PipelineFile",
+    "PipelineFileCreate",
+    "PipelineFileCreateCustomMetadataValue",
+    "PipelineFileCustomMetadataValue",
+    "PipelineFileResourceInfoValue",
+    "PipelineFileStatusResponse",
+    "PipelineType",
+    "Pooling",
+    "PresetRetrievalParams",
+    "PresignedUrl",
+    "Project",
+    "ProjectCreate",
+    "PromptMixinPrompts",
+    "PromptSpec",
+    "PydanticProgramMode",
+    "RelatedNodeInfo",
+    "RetrieveResults",
+    "SentenceSplitter",
+    "SimpleFileNodeParser",
+    "StatusEnum",
+    "SupportedEvalLlmModel",
+    "SupportedEvalLlmModelNames",
+    "TextNode",
+    "TextNodeRelationshipsValue",
+    "TextNodeWithScore",
+    "TokenTextSplitter",
+    "TransformationCategoryNames",
+    "ValidationError",
+    "ValidationErrorLocItem",
+]
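Because every generated model is re-exported here, downstream code can import any schema type from llama_cloud.types directly. A minimal sketch, using names taken from the __all__ list above:

# One import path covers the whole generated schema surface.
from llama_cloud.types import ApiKey, ChatMessage, Pipeline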
llama_cloud/types/api_key.py
@@ -0,0 +1,37 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ApiKey(pydantic.BaseModel):
+    """
+    Schema for an API Key.
+    """
+
+    id: str = pydantic.Field(description="Unique identifier")
+    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
+    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
+    name: typing.Optional[str]
+    user_id: str
+    redacted_api_key: str
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
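The overridden json() and dict() methods default to by_alias=True and exclude_unset=True, so serialized output omits optional fields that were never set. A minimal sketch with made-up field values:

from llama_cloud.types import ApiKey

key = ApiKey(id="key_123", user_id="user_456", redacted_api_key="sk-****abcd")
# name, created_at, and updated_at were never set, so exclude_unset=True
# drops them from the serialized output.
print(key.json())  # {"id": "key_123", "user_id": "user_456", "redacted_api_key": "sk-****abcd"}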
llama_cloud/types/azure_open_ai_embedding.py
@@ -0,0 +1,75 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class AzureOpenAiEmbedding(pydantic.BaseModel):
+    """
+    OpenAI class for embeddings.
+
+    Args:
+        mode (str): Mode for embedding.
+            Defaults to OpenAIEmbeddingMode.TEXT_SEARCH_MODE.
+            Options are:
+
+            - OpenAIEmbeddingMode.SIMILARITY_MODE
+            - OpenAIEmbeddingMode.TEXT_SEARCH_MODE
+
+        model (str): Model for embedding.
+            Defaults to OpenAIEmbeddingModelType.TEXT_EMBED_ADA_002.
+            Options are:
+
+            - OpenAIEmbeddingModelType.DAVINCI
+            - OpenAIEmbeddingModelType.CURIE
+            - OpenAIEmbeddingModelType.BABBAGE
+            - OpenAIEmbeddingModelType.ADA
+            - OpenAIEmbeddingModelType.TEXT_EMBED_ADA_002
+    """
+
+    model_name: typing.Optional[str] = pydantic.Field(description="The name of the embedding model.")
+    embed_batch_size: typing.Optional[int] = pydantic.Field(description="The batch size for embedding calls.")
+    callback_manager: typing.Optional[typing.Dict[str, typing.Any]]
+    num_workers: typing.Optional[int] = pydantic.Field(
+        description="The number of workers to use for async embedding calls."
+    )
+    additional_kwargs: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field(
+        description="Additional kwargs for the OpenAI API."
+    )
+    api_key: str = pydantic.Field(description="The OpenAI API key.")
+    api_base: typing.Optional[str] = pydantic.Field(description="The base URL for Azure deployment.")
+    api_version: typing.Optional[str] = pydantic.Field(description="The version for Azure OpenAI API.")
+    max_retries: typing.Optional[int] = pydantic.Field(description="Maximum number of retries.")
+    timeout: typing.Optional[float] = pydantic.Field(description="Timeout for each request.")
+    default_headers: typing.Optional[typing.Dict[str, str]] = pydantic.Field(
+        description="The default headers for API requests."
+    )
+    reuse_client: typing.Optional[bool] = pydantic.Field(
+        description="Reuse the OpenAI client between requests. When doing anything with large volumes of async API calls, setting this to false can improve stability."
+    )
+    dimensions: typing.Optional[int] = pydantic.Field(
+        description="The number of dimensions on the output embedding vectors. Works only with v3 embedding models."
+    )
+    azure_endpoint: typing.Optional[str] = pydantic.Field(description="The Azure endpoint to use.")
+    azure_deployment: typing.Optional[str] = pydantic.Field(description="The Azure deployment to use.")
+    class_name: typing.Optional[str]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
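Despite the breadth of optional knobs, api_key is the only required field. A minimal construction sketch; the endpoint and deployment values below are illustrative, not defaults shipped with the package:

from llama_cloud.types import AzureOpenAiEmbedding

embedding = AzureOpenAiEmbedding(
    api_key="<azure-openai-key>",                       # the only required field
    azure_endpoint="https://example.openai.azure.com",  # illustrative
    azure_deployment="my-embedding-deployment",         # illustrative
    embed_batch_size=10,
)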
llama_cloud/types/base.py
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class Base(pydantic.BaseModel):
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
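Base defines no fields; it only carries the Config that every generated model repeats: frozen = True makes instances immutable (and therefore hashable), smart_union improves union coercion, and json_encoders routes datetimes through serialize_datetime. A minimal sketch of the frozen behavior:

from llama_cloud.types import Base

b = Base()
hash(b)  # frozen = True generates __hash__, so instances are hashable
# Assigning to a field of any frozen model raises TypeError.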
llama_cloud/types/base_prompt_template.py
@@ -0,0 +1,44 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class BasePromptTemplate(pydantic.BaseModel):
+    """
+    Chainable mixin.
+
+    A module that can produce a `QueryComponent` from a set of inputs through
+    `as_query_component`.
+
+    If plugged in directly into a `QueryPipeline`, the `ChainableMixin` will be
+    converted into a `QueryComponent` with default parameters.
+    """
+
+    metadata: typing.Dict[str, typing.Any]
+    template_vars: typing.List[str]
+    kwargs: typing.Dict[str, str]
+    output_parser: typing.Optional[typing.Dict[str, typing.Any]]
+    template_var_mappings: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field(
+        description="Template variable mappings (Optional)."
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
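Only metadata, template_vars, and kwargs are required. A minimal construction sketch with illustrative values:

from llama_cloud.types import BasePromptTemplate

template = BasePromptTemplate(
    metadata={"prompt_type": "custom"},          # illustrative
    template_vars=["context_str", "query_str"],  # illustrative
    kwargs={},
)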
llama_cloud/types/bedrock_embedding.py
@@ -0,0 +1,56 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class BedrockEmbedding(pydantic.BaseModel):
+    """
+    Base class for embeddings.
+    """
+
+    model_name: str = pydantic.Field(description="The modelId of the Bedrock model to use.")
+    embed_batch_size: typing.Optional[int] = pydantic.Field(description="The batch size for embedding calls.")
+    callback_manager: typing.Optional[typing.Dict[str, typing.Any]]
+    num_workers: typing.Optional[int] = pydantic.Field(
+        description="The number of workers to use for async embedding calls."
+    )
+    profile_name: typing.Optional[str] = pydantic.Field(
+        description="The name of aws profile to use. If not given, then the default profile is used."
+    )
+    aws_access_key_id: typing.Optional[str] = pydantic.Field(description="AWS Access Key ID to use")
+    aws_secret_access_key: typing.Optional[str] = pydantic.Field(description="AWS Secret Access Key to use")
+    aws_session_token: typing.Optional[str] = pydantic.Field(description="AWS Session Token to use")
+    region_name: typing.Optional[str] = pydantic.Field(
+        description="AWS region name to use. Uses region configured in AWS CLI if not passed"
+    )
+    botocore_session: typing.Optional[typing.Any]
+    botocore_config: typing.Optional[typing.Any]
+    max_retries: typing.Optional[int] = pydantic.Field(description="The maximum number of API retries.")
+    timeout: typing.Optional[float] = pydantic.Field(
+        description="The timeout for the Bedrock API request in seconds. It will be used for both connect and read timeouts."
+    )
+    additional_kwargs: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field(
+        description="Additional kwargs for the bedrock client."
+    )
+    class_name: typing.Optional[str]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
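Here model_name (the Bedrock modelId) is the only required field; credentials can come from an AWS profile or be passed explicitly. A minimal sketch with illustrative values:

from llama_cloud.types import BedrockEmbedding

embedding = BedrockEmbedding(
    model_name="amazon.titan-embed-text-v1",  # a Bedrock modelId; illustrative
    region_name="us-east-1",                  # illustrative
    profile_name="default",
)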
llama_cloud/types/chat_message.py
@@ -0,0 +1,35 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .message_role import MessageRole
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class ChatMessage(pydantic.BaseModel):
+    """
+    Chat message.
+    """
+
+    role: typing.Optional[MessageRole]
+    content: typing.Optional[typing.Any]
+    additional_kwargs: typing.Optional[typing.Dict[str, typing.Any]]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
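All three fields are optional, and role is typed with the MessageRole enum imported above. A minimal sketch, assuming the enum exposes a USER member (its actual values live in message_role.py, not shown in this hunk):

from llama_cloud.types import ChatMessage, MessageRole

message = ChatMessage(
    role=MessageRole.USER,  # assumed member; see message_role.py for actual values
    content="Summarize this document.",
)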
llama_cloud/types/cloud_az_storage_blob_data_source.py
@@ -0,0 +1,40 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class CloudAzStorageBlobDataSource(pydantic.BaseModel):
+    """
+    Base component object to capture class names.
+    """
+
+    container_name: str = pydantic.Field(description="The name of the Azure Storage Blob container to read from.")
+    account_url: str = pydantic.Field(description="The Azure Storage Blob account URL to use for authentication.")
+    blob: typing.Optional[str] = pydantic.Field(description="The blob name to read from.")
+    prefix: typing.Optional[str] = pydantic.Field(
+        description="The prefix of the Azure Storage Blob objects to read from."
+    )
+    account_name: str = pydantic.Field(description="The Azure Storage Blob account name to use for authentication.")
+    account_key: str = pydantic.Field(description="The Azure Storage Blob account key to use for authentication.")
+    class_name: typing.Optional[str]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
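container_name, account_url, account_name, and account_key are required; blob and prefix narrow the selection. A minimal sketch with illustrative values:

from llama_cloud.types import CloudAzStorageBlobDataSource

source = CloudAzStorageBlobDataSource(
    container_name="my-container",                        # illustrative
    account_url="https://myaccount.blob.core.windows.net",
    account_name="myaccount",
    account_key="<storage-account-key>",
    prefix="docs/",  # optional: only read blobs under this prefix
)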
llama_cloud/types/cloud_chroma_vector_store.py
@@ -0,0 +1,40 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class CloudChromaVectorStore(pydantic.BaseModel):
+    """
+    Base class for cloud vector stores.
+    """
+
+    supports_nested_metadata_filters: typing.Optional[bool]
+    collection_name: typing.Optional[str]
+    host: typing.Optional[str]
+    port: typing.Optional[str]
+    ssl: bool
+    headers: typing.Optional[typing.Dict[str, str]]
+    persist_dir: typing.Optional[str]
+    collection_kwargs: typing.Optional[typing.Dict[str, typing.Any]]
+    class_name: typing.Optional[str]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
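Note that ssl is the only required field, and that port is typed as a string rather than an integer. A minimal sketch with illustrative connection settings:

from llama_cloud.types import CloudChromaVectorStore

store = CloudChromaVectorStore(
    ssl=False,                        # the only required field
    host="localhost",                 # illustrative
    port="8000",                      # Optional[str], not int
    collection_name="my_collection",
)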
llama_cloud/types/cloud_document.py
@@ -0,0 +1,36 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class CloudDocument(pydantic.BaseModel):
+    """
+    Cloud document stored in S3.
+    """
+
+    text: str
+    metadata: typing.Dict[str, typing.Any]
+    excluded_embed_metadata_keys: typing.Optional[typing.List[str]]
+    excluded_llm_metadata_keys: typing.Optional[typing.List[str]]
+    id: str
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
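text, metadata, and id are required; the two excluded_*_metadata_keys lists control which metadata keys are hidden from embedding and LLM calls. A minimal sketch with illustrative values:

from llama_cloud.types import CloudDocument

doc = CloudDocument(
    id="doc-1",                                 # illustrative
    text="Raw document text stored in S3.",
    metadata={"source": "upload"},
    excluded_llm_metadata_keys=["source"],      # hide this key from LLM prompts
)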