llama-cloud 0.0.1 (llama_cloud-0.0.1-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of llama-cloud might be problematic.

Files changed (173)
  1. llama_cloud/__init__.py +295 -0
  2. llama_cloud/client.py +72 -0
  3. llama_cloud/core/__init__.py +17 -0
  4. llama_cloud/core/api_error.py +15 -0
  5. llama_cloud/core/client_wrapper.py +51 -0
  6. llama_cloud/core/datetime_utils.py +28 -0
  7. llama_cloud/core/jsonable_encoder.py +103 -0
  8. llama_cloud/core/remove_none_from_dict.py +11 -0
  9. llama_cloud/errors/__init__.py +5 -0
  10. llama_cloud/errors/unprocessable_entity_error.py +9 -0
  11. llama_cloud/resources/__init__.py +40 -0
  12. llama_cloud/resources/api_keys/__init__.py +2 -0
  13. llama_cloud/resources/api_keys/client.py +302 -0
  14. llama_cloud/resources/billing/__init__.py +2 -0
  15. llama_cloud/resources/billing/client.py +234 -0
  16. llama_cloud/resources/component_definitions/__init__.py +2 -0
  17. llama_cloud/resources/component_definitions/client.py +192 -0
  18. llama_cloud/resources/data_sinks/__init__.py +5 -0
  19. llama_cloud/resources/data_sinks/client.py +506 -0
  20. llama_cloud/resources/data_sinks/types/__init__.py +6 -0
  21. llama_cloud/resources/data_sinks/types/data_sink_update_component.py +7 -0
  22. llama_cloud/resources/data_sinks/types/data_sink_update_component_one.py +17 -0
  23. llama_cloud/resources/data_sources/__init__.py +5 -0
  24. llama_cloud/resources/data_sources/client.py +521 -0
  25. llama_cloud/resources/data_sources/types/__init__.py +7 -0
  26. llama_cloud/resources/data_sources/types/data_source_update_component.py +7 -0
  27. llama_cloud/resources/data_sources/types/data_source_update_component_one.py +19 -0
  28. llama_cloud/resources/data_sources/types/data_source_update_custom_metadata_value.py +7 -0
  29. llama_cloud/resources/deprecated/__init__.py +2 -0
  30. llama_cloud/resources/deprecated/client.py +982 -0
  31. llama_cloud/resources/evals/__init__.py +2 -0
  32. llama_cloud/resources/evals/client.py +745 -0
  33. llama_cloud/resources/files/__init__.py +5 -0
  34. llama_cloud/resources/files/client.py +560 -0
  35. llama_cloud/resources/files/types/__init__.py +5 -0
  36. llama_cloud/resources/files/types/file_create_resource_info_value.py +5 -0
  37. llama_cloud/resources/parsing/__init__.py +2 -0
  38. llama_cloud/resources/parsing/client.py +982 -0
  39. llama_cloud/resources/pipelines/__init__.py +5 -0
  40. llama_cloud/resources/pipelines/client.py +2599 -0
  41. llama_cloud/resources/pipelines/types/__init__.py +5 -0
  42. llama_cloud/resources/pipelines/types/pipeline_file_update_custom_metadata_value.py +7 -0
  43. llama_cloud/resources/projects/__init__.py +2 -0
  44. llama_cloud/resources/projects/client.py +1231 -0
  45. llama_cloud/types/__init__.py +253 -0
  46. llama_cloud/types/api_key.py +37 -0
  47. llama_cloud/types/azure_open_ai_embedding.py +75 -0
  48. llama_cloud/types/base.py +26 -0
  49. llama_cloud/types/base_prompt_template.py +44 -0
  50. llama_cloud/types/bedrock_embedding.py +56 -0
  51. llama_cloud/types/chat_message.py +35 -0
  52. llama_cloud/types/cloud_az_storage_blob_data_source.py +40 -0
  53. llama_cloud/types/cloud_chroma_vector_store.py +40 -0
  54. llama_cloud/types/cloud_document.py +36 -0
  55. llama_cloud/types/cloud_document_create.py +36 -0
  56. llama_cloud/types/cloud_gcs_data_source.py +37 -0
  57. llama_cloud/types/cloud_google_drive_data_source.py +36 -0
  58. llama_cloud/types/cloud_one_drive_data_source.py +38 -0
  59. llama_cloud/types/cloud_pinecone_vector_store.py +46 -0
  60. llama_cloud/types/cloud_postgres_vector_store.py +44 -0
  61. llama_cloud/types/cloud_qdrant_vector_store.py +48 -0
  62. llama_cloud/types/cloud_s_3_data_source.py +42 -0
  63. llama_cloud/types/cloud_sharepoint_data_source.py +38 -0
  64. llama_cloud/types/cloud_weaviate_vector_store.py +38 -0
  65. llama_cloud/types/code_splitter.py +46 -0
  66. llama_cloud/types/cohere_embedding.py +46 -0
  67. llama_cloud/types/configurable_data_sink_names.py +37 -0
  68. llama_cloud/types/configurable_data_source_names.py +41 -0
  69. llama_cloud/types/configurable_transformation_definition.py +45 -0
  70. llama_cloud/types/configurable_transformation_names.py +73 -0
  71. llama_cloud/types/configured_transformation_item.py +43 -0
  72. llama_cloud/types/configured_transformation_item_component.py +9 -0
  73. llama_cloud/types/configured_transformation_item_component_one.py +35 -0
  74. llama_cloud/types/data_sink.py +40 -0
  75. llama_cloud/types/data_sink_component.py +7 -0
  76. llama_cloud/types/data_sink_component_one.py +17 -0
  77. llama_cloud/types/data_sink_create.py +36 -0
  78. llama_cloud/types/data_sink_create_component.py +7 -0
  79. llama_cloud/types/data_sink_create_component_one.py +17 -0
  80. llama_cloud/types/data_sink_definition.py +41 -0
  81. llama_cloud/types/data_source.py +44 -0
  82. llama_cloud/types/data_source_component.py +7 -0
  83. llama_cloud/types/data_source_component_one.py +19 -0
  84. llama_cloud/types/data_source_create.py +40 -0
  85. llama_cloud/types/data_source_create_component.py +7 -0
  86. llama_cloud/types/data_source_create_component_one.py +19 -0
  87. llama_cloud/types/data_source_create_custom_metadata_value.py +7 -0
  88. llama_cloud/types/data_source_custom_metadata_value.py +7 -0
  89. llama_cloud/types/data_source_definition.py +41 -0
  90. llama_cloud/types/eval_dataset.py +37 -0
  91. llama_cloud/types/eval_dataset_job_params.py +36 -0
  92. llama_cloud/types/eval_dataset_job_record.py +59 -0
  93. llama_cloud/types/eval_execution_params.py +38 -0
  94. llama_cloud/types/eval_execution_params_override.py +38 -0
  95. llama_cloud/types/eval_llm_model_data.py +33 -0
  96. llama_cloud/types/eval_question.py +39 -0
  97. llama_cloud/types/eval_question_create.py +28 -0
  98. llama_cloud/types/eval_question_result.py +49 -0
  99. llama_cloud/types/file.py +46 -0
  100. llama_cloud/types/file_resource_info_value.py +5 -0
  101. llama_cloud/types/filter_condition.py +21 -0
  102. llama_cloud/types/filter_operator.py +65 -0
  103. llama_cloud/types/gemini_embedding.py +51 -0
  104. llama_cloud/types/html_node_parser.py +44 -0
  105. llama_cloud/types/http_validation_error.py +29 -0
  106. llama_cloud/types/hugging_face_inference_api_embedding.py +68 -0
  107. llama_cloud/types/hugging_face_inference_api_embedding_token.py +5 -0
  108. llama_cloud/types/json_node_parser.py +43 -0
  109. llama_cloud/types/llama_parse_supported_file_extensions.py +161 -0
  110. llama_cloud/types/llm.py +55 -0
  111. llama_cloud/types/local_eval.py +46 -0
  112. llama_cloud/types/local_eval_results.py +37 -0
  113. llama_cloud/types/local_eval_sets.py +30 -0
  114. llama_cloud/types/managed_ingestion_status.py +37 -0
  115. llama_cloud/types/markdown_element_node_parser.py +49 -0
  116. llama_cloud/types/markdown_node_parser.py +43 -0
  117. llama_cloud/types/message_role.py +45 -0
  118. llama_cloud/types/metadata_filter.py +41 -0
  119. llama_cloud/types/metadata_filter_value.py +5 -0
  120. llama_cloud/types/metadata_filters.py +41 -0
  121. llama_cloud/types/metadata_filters_filters_item.py +8 -0
  122. llama_cloud/types/metric_result.py +30 -0
  123. llama_cloud/types/node_parser.py +37 -0
  124. llama_cloud/types/object_type.py +33 -0
  125. llama_cloud/types/open_ai_embedding.py +73 -0
  126. llama_cloud/types/parser_languages.py +361 -0
  127. llama_cloud/types/parsing_history_item.py +36 -0
  128. llama_cloud/types/parsing_job.py +30 -0
  129. llama_cloud/types/parsing_job_json_result.py +29 -0
  130. llama_cloud/types/parsing_job_markdown_result.py +29 -0
  131. llama_cloud/types/parsing_job_text_result.py +29 -0
  132. llama_cloud/types/parsing_usage.py +29 -0
  133. llama_cloud/types/pipeline.py +64 -0
  134. llama_cloud/types/pipeline_create.py +61 -0
  135. llama_cloud/types/pipeline_data_source.py +46 -0
  136. llama_cloud/types/pipeline_data_source_component.py +7 -0
  137. llama_cloud/types/pipeline_data_source_component_one.py +19 -0
  138. llama_cloud/types/pipeline_data_source_create.py +32 -0
  139. llama_cloud/types/pipeline_data_source_custom_metadata_value.py +7 -0
  140. llama_cloud/types/pipeline_deployment.py +38 -0
  141. llama_cloud/types/pipeline_file.py +52 -0
  142. llama_cloud/types/pipeline_file_create.py +36 -0
  143. llama_cloud/types/pipeline_file_create_custom_metadata_value.py +7 -0
  144. llama_cloud/types/pipeline_file_custom_metadata_value.py +7 -0
  145. llama_cloud/types/pipeline_file_resource_info_value.py +7 -0
  146. llama_cloud/types/pipeline_file_status_response.py +35 -0
  147. llama_cloud/types/pipeline_type.py +21 -0
  148. llama_cloud/types/pooling.py +29 -0
  149. llama_cloud/types/preset_retrieval_params.py +40 -0
  150. llama_cloud/types/presigned_url.py +36 -0
  151. llama_cloud/types/project.py +42 -0
  152. llama_cloud/types/project_create.py +32 -0
  153. llama_cloud/types/prompt_mixin_prompts.py +36 -0
  154. llama_cloud/types/prompt_spec.py +35 -0
  155. llama_cloud/types/pydantic_program_mode.py +41 -0
  156. llama_cloud/types/related_node_info.py +37 -0
  157. llama_cloud/types/retrieve_results.py +40 -0
  158. llama_cloud/types/sentence_splitter.py +48 -0
  159. llama_cloud/types/simple_file_node_parser.py +44 -0
  160. llama_cloud/types/status_enum.py +33 -0
  161. llama_cloud/types/supported_eval_llm_model.py +35 -0
  162. llama_cloud/types/supported_eval_llm_model_names.py +29 -0
  163. llama_cloud/types/text_node.py +62 -0
  164. llama_cloud/types/text_node_relationships_value.py +7 -0
  165. llama_cloud/types/text_node_with_score.py +36 -0
  166. llama_cloud/types/token_text_splitter.py +43 -0
  167. llama_cloud/types/transformation_category_names.py +21 -0
  168. llama_cloud/types/validation_error.py +31 -0
  169. llama_cloud/types/validation_error_loc_item.py +5 -0
  170. llama_cloud-0.0.1.dist-info/LICENSE +21 -0
  171. llama_cloud-0.0.1.dist-info/METADATA +25 -0
  172. llama_cloud-0.0.1.dist-info/RECORD +173 -0
  173. llama_cloud-0.0.1.dist-info/WHEEL +4 -0
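The wheel ships a Fern-generated SDK: a top-level client in llama_cloud/client.py, per-resource sub-clients under llama_cloud/resources/, and pydantic models under llama_cloud/types/. A quick sketch of importing one of the generated modules directly by its path from the listing above (the top-level __init__.py presumably re-exports the same names, but that is not shown in this diff):

    # Module paths follow the file listing above; names are taken from the generated sources.
    from llama_cloud.types.configurable_data_sink_names import ConfigurableDataSinkNames

    # Enumerate the sink types this release claims to support.
    print([member.value for member in ConfigurableDataSinkNames])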
llama_cloud/types/configurable_transformation_names.py
@@ -0,0 +1,73 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import enum
+ import typing
+
+ T_Result = typing.TypeVar("T_Result")
+
+
+ class ConfigurableTransformationNames(str, enum.Enum):
+     """
+     An enumeration.
+     """
+
+     CODE_NODE_PARSER = "CODE_NODE_PARSER"
+     SENTENCE_AWARE_NODE_PARSER = "SENTENCE_AWARE_NODE_PARSER"
+     TOKEN_AWARE_NODE_PARSER = "TOKEN_AWARE_NODE_PARSER"
+     HTML_NODE_PARSER = "HTML_NODE_PARSER"
+     MARKDOWN_NODE_PARSER = "MARKDOWN_NODE_PARSER"
+     JSON_NODE_PARSER = "JSON_NODE_PARSER"
+     SIMPLE_FILE_NODE_PARSER = "SIMPLE_FILE_NODE_PARSER"
+     MARKDOWN_ELEMENT_NODE_PARSER = "MARKDOWN_ELEMENT_NODE_PARSER"
+     OPENAI_EMBEDDING = "OPENAI_EMBEDDING"
+     AZURE_EMBEDDING = "AZURE_EMBEDDING"
+     COHERE_EMBEDDING = "COHERE_EMBEDDING"
+     BEDROCK_EMBEDDING = "BEDROCK_EMBEDDING"
+     HUGGINGFACE_API_EMBEDDING = "HUGGINGFACE_API_EMBEDDING"
+     GEMINI_EMBEDDING = "GEMINI_EMBEDDING"
+
+     def visit(
+         self,
+         code_node_parser: typing.Callable[[], T_Result],
+         sentence_aware_node_parser: typing.Callable[[], T_Result],
+         token_aware_node_parser: typing.Callable[[], T_Result],
+         html_node_parser: typing.Callable[[], T_Result],
+         markdown_node_parser: typing.Callable[[], T_Result],
+         json_node_parser: typing.Callable[[], T_Result],
+         simple_file_node_parser: typing.Callable[[], T_Result],
+         markdown_element_node_parser: typing.Callable[[], T_Result],
+         openai_embedding: typing.Callable[[], T_Result],
+         azure_embedding: typing.Callable[[], T_Result],
+         cohere_embedding: typing.Callable[[], T_Result],
+         bedrock_embedding: typing.Callable[[], T_Result],
+         huggingface_api_embedding: typing.Callable[[], T_Result],
+         gemini_embedding: typing.Callable[[], T_Result],
+     ) -> T_Result:
+         if self is ConfigurableTransformationNames.CODE_NODE_PARSER:
+             return code_node_parser()
+         if self is ConfigurableTransformationNames.SENTENCE_AWARE_NODE_PARSER:
+             return sentence_aware_node_parser()
+         if self is ConfigurableTransformationNames.TOKEN_AWARE_NODE_PARSER:
+             return token_aware_node_parser()
+         if self is ConfigurableTransformationNames.HTML_NODE_PARSER:
+             return html_node_parser()
+         if self is ConfigurableTransformationNames.MARKDOWN_NODE_PARSER:
+             return markdown_node_parser()
+         if self is ConfigurableTransformationNames.JSON_NODE_PARSER:
+             return json_node_parser()
+         if self is ConfigurableTransformationNames.SIMPLE_FILE_NODE_PARSER:
+             return simple_file_node_parser()
+         if self is ConfigurableTransformationNames.MARKDOWN_ELEMENT_NODE_PARSER:
+             return markdown_element_node_parser()
+         if self is ConfigurableTransformationNames.OPENAI_EMBEDDING:
+             return openai_embedding()
+         if self is ConfigurableTransformationNames.AZURE_EMBEDDING:
+             return azure_embedding()
+         if self is ConfigurableTransformationNames.COHERE_EMBEDDING:
+             return cohere_embedding()
+         if self is ConfigurableTransformationNames.BEDROCK_EMBEDDING:
+             return bedrock_embedding()
+         if self is ConfigurableTransformationNames.HUGGINGFACE_API_EMBEDDING:
+             return huggingface_api_embedding()
+         if self is ConfigurableTransformationNames.GEMINI_EMBEDDING:
+             return gemini_embedding()
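Fern-generated enums carry a visit() helper that forces the caller to supply a handler for every variant, giving exhaustive matching without a chain of if/elif at the call site. A minimal sketch of mapping a transformation name to a display label (the label strings are illustrative, not part of the SDK):

    from llama_cloud.types.configurable_transformation_names import ConfigurableTransformationNames

    name = ConfigurableTransformationNames.OPENAI_EMBEDDING

    # Each keyword argument is a zero-argument callable; exactly one is invoked.
    label = name.visit(
        code_node_parser=lambda: "Code splitter",
        sentence_aware_node_parser=lambda: "Sentence splitter",
        token_aware_node_parser=lambda: "Token splitter",
        html_node_parser=lambda: "HTML parser",
        markdown_node_parser=lambda: "Markdown parser",
        json_node_parser=lambda: "JSON parser",
        simple_file_node_parser=lambda: "Simple file parser",
        markdown_element_node_parser=lambda: "Markdown element parser",
        openai_embedding=lambda: "OpenAI embedding",
        azure_embedding=lambda: "Azure OpenAI embedding",
        cohere_embedding=lambda: "Cohere embedding",
        bedrock_embedding=lambda: "Bedrock embedding",
        huggingface_api_embedding=lambda: "Hugging Face Inference API embedding",
        gemini_embedding=lambda: "Gemini embedding",
    )
    print(label)  # "OpenAI embedding"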
llama_cloud/types/configured_transformation_item.py
@@ -0,0 +1,43 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import datetime as dt
+ import typing
+
+ from ..core.datetime_utils import serialize_datetime
+ from .configurable_transformation_names import ConfigurableTransformationNames
+ from .configured_transformation_item_component import ConfiguredTransformationItemComponent
+
+ try:
+     import pydantic.v1 as pydantic  # type: ignore
+ except ImportError:
+     import pydantic  # type: ignore
+
+
+ class ConfiguredTransformationItem(pydantic.BaseModel):
+     """
+     Configured transformations for pipelines.
+
+     Similar to ConfigurableTransformation but includes a few
+     more fields that are useful to the platform.
+     """
+
+     id: typing.Optional[str]
+     configurable_transformation_type: ConfigurableTransformationNames = pydantic.Field(
+         description="Name for the type of transformation this is (e.g. SIMPLE_NODE_PARSER). Can also be an enum instance of llama_index.ingestion.transformations.ConfigurableTransformations. This will be converted to ConfigurableTransformationNames."
+     )
+     component: ConfiguredTransformationItemComponent = pydantic.Field(
+         description="Component that implements the transformation"
+     )
+
+     def json(self, **kwargs: typing.Any) -> str:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().json(**kwargs_with_defaults)
+
+     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().dict(**kwargs_with_defaults)
+
+     class Config:
+         frozen = True
+         smart_union = True
+         json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/configured_transformation_item_component.py
@@ -0,0 +1,9 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ from .configured_transformation_item_component_one import ConfiguredTransformationItemComponentOne
+
+ ConfiguredTransformationItemComponent = typing.Union[
+     typing.Dict[str, typing.Any], ConfiguredTransformationItemComponentOne
+ ]
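Because the component field accepts either a typed component model or a plain Dict[str, Any], a client can describe a transformation without importing the concrete component classes. A hedged sketch, assuming the backend accepts this payload shape (the component keys below are illustrative and not verified against the SentenceSplitter schema):

    from llama_cloud.types.configurable_transformation_names import ConfigurableTransformationNames
    from llama_cloud.types.configured_transformation_item import ConfiguredTransformationItem

    item = ConfiguredTransformationItem(
        configurable_transformation_type=ConfigurableTransformationNames.SENTENCE_AWARE_NODE_PARSER,
        component={"chunk_size": 512, "chunk_overlap": 20},  # illustrative keys only
    )

    # json()/dict() default to by_alias=True and exclude_unset=True, so the unset
    # optional id field is omitted from the serialized payload.
    print(item.json())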
llama_cloud/types/configured_transformation_item_component_one.py
@@ -0,0 +1,35 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ from .azure_open_ai_embedding import AzureOpenAiEmbedding
+ from .bedrock_embedding import BedrockEmbedding
+ from .code_splitter import CodeSplitter
+ from .cohere_embedding import CohereEmbedding
+ from .gemini_embedding import GeminiEmbedding
+ from .html_node_parser import HtmlNodeParser
+ from .hugging_face_inference_api_embedding import HuggingFaceInferenceApiEmbedding
+ from .json_node_parser import JsonNodeParser
+ from .markdown_element_node_parser import MarkdownElementNodeParser
+ from .markdown_node_parser import MarkdownNodeParser
+ from .open_ai_embedding import OpenAiEmbedding
+ from .sentence_splitter import SentenceSplitter
+ from .simple_file_node_parser import SimpleFileNodeParser
+ from .token_text_splitter import TokenTextSplitter
+
+ ConfiguredTransformationItemComponentOne = typing.Union[
+     CodeSplitter,
+     SentenceSplitter,
+     TokenTextSplitter,
+     HtmlNodeParser,
+     MarkdownNodeParser,
+     JsonNodeParser,
+     SimpleFileNodeParser,
+     MarkdownElementNodeParser,
+     OpenAiEmbedding,
+     AzureOpenAiEmbedding,
+     CohereEmbedding,
+     BedrockEmbedding,
+     HuggingFaceInferenceApiEmbedding,
+     GeminiEmbedding,
+ ]
llama_cloud/types/data_sink.py
@@ -0,0 +1,40 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import datetime as dt
+ import typing
+
+ from ..core.datetime_utils import serialize_datetime
+ from .configurable_data_sink_names import ConfigurableDataSinkNames
+ from .data_sink_component import DataSinkComponent
+
+ try:
+     import pydantic.v1 as pydantic  # type: ignore
+ except ImportError:
+     import pydantic  # type: ignore
+
+
+ class DataSink(pydantic.BaseModel):
+     """
+     Schema for a data sink.
+     """
+
+     id: str = pydantic.Field(description="Unique identifier")
+     created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
+     updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
+     name: str = pydantic.Field(description="The name of the data sink.")
+     sink_type: ConfigurableDataSinkNames
+     component: DataSinkComponent
+     project_id: str
+
+     def json(self, **kwargs: typing.Any) -> str:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().json(**kwargs_with_defaults)
+
+     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().dict(**kwargs_with_defaults)
+
+     class Config:
+         frozen = True
+         smart_union = True
+         json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/data_sink_component.py
@@ -0,0 +1,7 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ from .data_sink_component_one import DataSinkComponentOne
+
+ DataSinkComponent = typing.Union[typing.Dict[str, typing.Any], DataSinkComponentOne]
llama_cloud/types/data_sink_component_one.py
@@ -0,0 +1,17 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ from .cloud_chroma_vector_store import CloudChromaVectorStore
+ from .cloud_pinecone_vector_store import CloudPineconeVectorStore
+ from .cloud_postgres_vector_store import CloudPostgresVectorStore
+ from .cloud_qdrant_vector_store import CloudQdrantVectorStore
+ from .cloud_weaviate_vector_store import CloudWeaviateVectorStore
+
+ DataSinkComponentOne = typing.Union[
+     CloudChromaVectorStore,
+     CloudPineconeVectorStore,
+     CloudPostgresVectorStore,
+     CloudQdrantVectorStore,
+     CloudWeaviateVectorStore,
+ ]
llama_cloud/types/data_sink_create.py
@@ -0,0 +1,36 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import datetime as dt
+ import typing
+
+ from ..core.datetime_utils import serialize_datetime
+ from .configurable_data_sink_names import ConfigurableDataSinkNames
+ from .data_sink_create_component import DataSinkCreateComponent
+
+ try:
+     import pydantic.v1 as pydantic  # type: ignore
+ except ImportError:
+     import pydantic  # type: ignore
+
+
+ class DataSinkCreate(pydantic.BaseModel):
+     """
+     Schema for creating a data sink.
+     """
+
+     name: str = pydantic.Field(description="The name of the data sink.")
+     sink_type: ConfigurableDataSinkNames
+     component: DataSinkCreateComponent
+
+     def json(self, **kwargs: typing.Any) -> str:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().json(**kwargs_with_defaults)
+
+     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().dict(**kwargs_with_defaults)
+
+     class Config:
+         frozen = True
+         smart_union = True
+         json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/data_sink_create_component.py
@@ -0,0 +1,7 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ from .data_sink_create_component_one import DataSinkCreateComponentOne
+
+ DataSinkCreateComponent = typing.Union[typing.Dict[str, typing.Any], DataSinkCreateComponentOne]
llama_cloud/types/data_sink_create_component_one.py
@@ -0,0 +1,17 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ from .cloud_chroma_vector_store import CloudChromaVectorStore
+ from .cloud_pinecone_vector_store import CloudPineconeVectorStore
+ from .cloud_postgres_vector_store import CloudPostgresVectorStore
+ from .cloud_qdrant_vector_store import CloudQdrantVectorStore
+ from .cloud_weaviate_vector_store import CloudWeaviateVectorStore
+
+ DataSinkCreateComponentOne = typing.Union[
+     CloudChromaVectorStore,
+     CloudPineconeVectorStore,
+     CloudPostgresVectorStore,
+     CloudQdrantVectorStore,
+     CloudWeaviateVectorStore,
+ ]
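DataSinkCreate follows the same pattern: the component union shown above takes either a typed vector-store model or a raw dict. A hedged sketch of building a create request; the enum member and component keys are assumptions not shown in this diff, so check ConfigurableDataSinkNames and the sink's json_schema for the real names:

    from llama_cloud.types.configurable_data_sink_names import ConfigurableDataSinkNames
    from llama_cloud.types.data_sink_create import DataSinkCreate

    sink = DataSinkCreate(
        name="my-pinecone-sink",
        sink_type=ConfigurableDataSinkNames.PINECONE,  # assumed member name
        component={"api_key": "...", "index_name": "my-index"},  # illustrative keys only
    )
    print(sink.json())  # serialized with by_alias=True, exclude_unset=True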
llama_cloud/types/data_sink_definition.py
@@ -0,0 +1,41 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import datetime as dt
+ import typing
+
+ from ..core.datetime_utils import serialize_datetime
+ from .configurable_data_sink_names import ConfigurableDataSinkNames
+
+ try:
+     import pydantic.v1 as pydantic  # type: ignore
+ except ImportError:
+     import pydantic  # type: ignore
+
+
+ class DataSinkDefinition(pydantic.BaseModel):
+     """
+     Schema for a data sink definition.
+     """
+
+     label: str = pydantic.Field(
+         description="The label field will be used to display the name of the component in the UI"
+     )
+     json_schema: typing.Dict[str, typing.Any] = pydantic.Field(
+         description="The json_schema field can be used by clients to determine how to construct the component"
+     )
+     sink_type: ConfigurableDataSinkNames = pydantic.Field(
+         description="The name field will act as the unique identifier of DataSinkDefinition objects"
+     )
+
+     def json(self, **kwargs: typing.Any) -> str:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().json(**kwargs_with_defaults)
+
+     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().dict(**kwargs_with_defaults)
+
+     class Config:
+         frozen = True
+         smart_union = True
+         json_encoders = {dt.datetime: serialize_datetime}
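The json_schema field is a plain JSON Schema document, so a client can validate a candidate component payload before sending a create request. A sketch using the third-party jsonschema package, which is an assumption here and not a dependency of this SDK:

    import jsonschema  # third-party validator; not bundled with llama-cloud

    from llama_cloud.types.data_sink_definition import DataSinkDefinition


    def check_component(definition: DataSinkDefinition, component: dict) -> None:
        # Raises jsonschema.ValidationError if the payload does not match the schema.
        jsonschema.validate(instance=component, schema=definition.json_schema)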
llama_cloud/types/data_source.py
@@ -0,0 +1,44 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import datetime as dt
+ import typing
+
+ from ..core.datetime_utils import serialize_datetime
+ from .configurable_data_source_names import ConfigurableDataSourceNames
+ from .data_source_component import DataSourceComponent
+ from .data_source_custom_metadata_value import DataSourceCustomMetadataValue
+
+ try:
+     import pydantic.v1 as pydantic  # type: ignore
+ except ImportError:
+     import pydantic  # type: ignore
+
+
+ class DataSource(pydantic.BaseModel):
+     """
+     Schema for a data source.
+     """
+
+     id: str = pydantic.Field(description="Unique identifier")
+     created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
+     updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
+     name: str = pydantic.Field(description="The name of the data source.")
+     source_type: ConfigurableDataSourceNames
+     custom_metadata: typing.Optional[typing.Dict[str, DataSourceCustomMetadataValue]] = pydantic.Field(
+         description="Custom metadata that will be present on all data loaded from the data source"
+     )
+     component: DataSourceComponent
+     project_id: str
+
+     def json(self, **kwargs: typing.Any) -> str:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().json(**kwargs_with_defaults)
+
+     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().dict(**kwargs_with_defaults)
+
+     class Config:
+         frozen = True
+         smart_union = True
+         json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/data_source_component.py
@@ -0,0 +1,7 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ from .data_source_component_one import DataSourceComponentOne
+
+ DataSourceComponent = typing.Union[typing.Dict[str, typing.Any], DataSourceComponentOne]
llama_cloud/types/data_source_component_one.py
@@ -0,0 +1,19 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ from .cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
+ from .cloud_gcs_data_source import CloudGcsDataSource
+ from .cloud_google_drive_data_source import CloudGoogleDriveDataSource
+ from .cloud_one_drive_data_source import CloudOneDriveDataSource
+ from .cloud_s_3_data_source import CloudS3DataSource
+ from .cloud_sharepoint_data_source import CloudSharepointDataSource
+
+ DataSourceComponentOne = typing.Union[
+     CloudS3DataSource,
+     CloudAzStorageBlobDataSource,
+     CloudGcsDataSource,
+     CloudGoogleDriveDataSource,
+     CloudOneDriveDataSource,
+     CloudSharepointDataSource,
+ ]
llama_cloud/types/data_source_create.py
@@ -0,0 +1,40 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import datetime as dt
+ import typing
+
+ from ..core.datetime_utils import serialize_datetime
+ from .configurable_data_source_names import ConfigurableDataSourceNames
+ from .data_source_create_component import DataSourceCreateComponent
+ from .data_source_create_custom_metadata_value import DataSourceCreateCustomMetadataValue
+
+ try:
+     import pydantic.v1 as pydantic  # type: ignore
+ except ImportError:
+     import pydantic  # type: ignore
+
+
+ class DataSourceCreate(pydantic.BaseModel):
+     """
+     Schema for creating a data source.
+     """
+
+     name: str = pydantic.Field(description="The name of the data source.")
+     source_type: ConfigurableDataSourceNames
+     custom_metadata: typing.Optional[typing.Dict[str, DataSourceCreateCustomMetadataValue]] = pydantic.Field(
+         description="Custom metadata that will be present on all data loaded from the data source"
+     )
+     component: DataSourceCreateComponent
+
+     def json(self, **kwargs: typing.Any) -> str:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().json(**kwargs_with_defaults)
+
+     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().dict(**kwargs_with_defaults)
+
+     class Config:
+         frozen = True
+         smart_union = True
+         json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/data_source_create_component.py
@@ -0,0 +1,7 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ from .data_source_create_component_one import DataSourceCreateComponentOne
+
+ DataSourceCreateComponent = typing.Union[typing.Dict[str, typing.Any], DataSourceCreateComponentOne]
llama_cloud/types/data_source_create_component_one.py
@@ -0,0 +1,19 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ from .cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
+ from .cloud_gcs_data_source import CloudGcsDataSource
+ from .cloud_google_drive_data_source import CloudGoogleDriveDataSource
+ from .cloud_one_drive_data_source import CloudOneDriveDataSource
+ from .cloud_s_3_data_source import CloudS3DataSource
+ from .cloud_sharepoint_data_source import CloudSharepointDataSource
+
+ DataSourceCreateComponentOne = typing.Union[
+     CloudS3DataSource,
+     CloudAzStorageBlobDataSource,
+     CloudGcsDataSource,
+     CloudGoogleDriveDataSource,
+     CloudOneDriveDataSource,
+     CloudSharepointDataSource,
+ ]
llama_cloud/types/data_source_create_custom_metadata_value.py
@@ -0,0 +1,7 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ DataSourceCreateCustomMetadataValue = typing.Union[
+     typing.Dict[str, typing.Any], typing.List[typing.Any], str, int, float, bool
+ ]
llama_cloud/types/data_source_custom_metadata_value.py
@@ -0,0 +1,7 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ DataSourceCustomMetadataValue = typing.Union[
+     typing.Dict[str, typing.Any], typing.List[typing.Any], str, int, float, bool
+ ]
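The custom metadata value aliases allow scalars, lists, or nested dicts, and DataSourceCreate attaches them to every document loaded from the source. A hedged sketch; the enum member and component keys are assumptions not shown in this diff:

    from llama_cloud.types.configurable_data_source_names import ConfigurableDataSourceNames
    from llama_cloud.types.data_source_create import DataSourceCreate

    source = DataSourceCreate(
        name="docs-bucket",
        source_type=ConfigurableDataSourceNames.S_3,  # assumed member name
        custom_metadata={"team": "platform", "priority": 1, "tags": ["internal", "beta"]},
        component={"bucket": "my-bucket"},  # illustrative keys; see CloudS3DataSource for real fields
    )
    print(source.dict())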
llama_cloud/types/data_source_definition.py
@@ -0,0 +1,41 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import datetime as dt
+ import typing
+
+ from ..core.datetime_utils import serialize_datetime
+ from .configurable_data_source_names import ConfigurableDataSourceNames
+
+ try:
+     import pydantic.v1 as pydantic  # type: ignore
+ except ImportError:
+     import pydantic  # type: ignore
+
+
+ class DataSourceDefinition(pydantic.BaseModel):
+     """
+     Schema for a data source definition.
+     """
+
+     label: str = pydantic.Field(
+         description="The label field will be used to display the name of the component in the UI"
+     )
+     json_schema: typing.Dict[str, typing.Any] = pydantic.Field(
+         description="The json_schema field can be used by clients to determine how to construct the component"
+     )
+     source_type: ConfigurableDataSourceNames = pydantic.Field(
+         description="The name field will act as the unique identifier of DataSourceDefinition objects"
+     )
+
+     def json(self, **kwargs: typing.Any) -> str:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().json(**kwargs_with_defaults)
+
+     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().dict(**kwargs_with_defaults)
+
+     class Config:
+         frozen = True
+         smart_union = True
+         json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/eval_dataset.py
@@ -0,0 +1,37 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import datetime as dt
+ import typing
+
+ from ..core.datetime_utils import serialize_datetime
+
+ try:
+     import pydantic.v1 as pydantic  # type: ignore
+ except ImportError:
+     import pydantic  # type: ignore
+
+
+ class EvalDataset(pydantic.BaseModel):
+     """
+     Schema for an eval dataset.
+     Includes the other DB fields like id, created_at, & updated_at.
+     """
+
+     id: str = pydantic.Field(description="Unique identifier")
+     created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
+     updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
+     name: str = pydantic.Field(description="The name of the EvalDataset.")
+     project_id: str
+
+     def json(self, **kwargs: typing.Any) -> str:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().json(**kwargs_with_defaults)
+
+     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().dict(**kwargs_with_defaults)
+
+     class Config:
+         frozen = True
+         smart_union = True
+         json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/eval_dataset_job_params.py
@@ -0,0 +1,36 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import datetime as dt
+ import typing
+
+ from ..core.datetime_utils import serialize_datetime
+ from .eval_execution_params import EvalExecutionParams
+
+ try:
+     import pydantic.v1 as pydantic  # type: ignore
+ except ImportError:
+     import pydantic  # type: ignore
+
+
+ class EvalDatasetJobParams(pydantic.BaseModel):
+     """
+     Schema for the parameters of an eval dataset job.
+     """
+
+     eval_question_ids: typing.List[str] = pydantic.Field(
+         description="The IDs for the EvalQuestions this execution ran against."
+     )
+     eval_execution_params: EvalExecutionParams = pydantic.Field(description="The parameters for the eval execution.")
+
+     def json(self, **kwargs: typing.Any) -> str:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().json(**kwargs_with_defaults)
+
+     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+         return super().dict(**kwargs_with_defaults)
+
+     class Config:
+         frozen = True
+         smart_union = True
+         json_encoders = {dt.datetime: serialize_datetime}