llama-cloud 0.0.17__py3-none-any.whl → 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of llama-cloud has been flagged as potentially problematic.

Files changed (136)
  1. llama_cloud/__init__.py +8 -36
  2. llama_cloud/client.py +0 -3
  3. llama_cloud/resources/__init__.py +2 -10
  4. llama_cloud/resources/data_sinks/__init__.py +2 -2
  5. llama_cloud/resources/data_sinks/client.py +8 -8
  6. llama_cloud/resources/data_sinks/types/__init__.py +1 -2
  7. llama_cloud/resources/data_sinks/types/data_sink_update_component.py +15 -2
  8. llama_cloud/resources/data_sources/__init__.py +2 -2
  9. llama_cloud/resources/data_sources/client.py +6 -6
  10. llama_cloud/resources/data_sources/types/__init__.py +1 -2
  11. llama_cloud/resources/data_sources/types/data_source_update_component.py +23 -2
  12. llama_cloud/resources/extraction/client.py +14 -14
  13. llama_cloud/resources/files/client.py +10 -10
  14. llama_cloud/resources/organizations/client.py +2 -2
  15. llama_cloud/resources/parsing/client.py +100 -60
  16. llama_cloud/resources/pipelines/__init__.py +0 -4
  17. llama_cloud/resources/pipelines/client.py +50 -340
  18. llama_cloud/resources/pipelines/types/__init__.py +1 -7
  19. llama_cloud/resources/pipelines/types/pipeline_update_embedding_config.py +15 -15
  20. llama_cloud/resources/pipelines/types/pipeline_update_transform_config.py +1 -24
  21. llama_cloud/types/__init__.py +9 -29
  22. llama_cloud/types/azure_open_ai_embedding.py +7 -39
  23. llama_cloud/types/base_prompt_template.py +3 -14
  24. llama_cloud/types/bedrock_embedding.py +7 -20
  25. llama_cloud/types/box_auth_mechanism.py +0 -4
  26. llama_cloud/types/character_splitter.py +3 -4
  27. llama_cloud/types/chat_data.py +0 -5
  28. llama_cloud/types/chat_message.py +1 -6
  29. llama_cloud/types/cloud_az_storage_blob_data_source.py +7 -18
  30. llama_cloud/types/cloud_box_data_source.py +6 -16
  31. llama_cloud/types/cloud_confluence_data_source.py +6 -10
  32. llama_cloud/types/cloud_document.py +1 -3
  33. llama_cloud/types/cloud_document_create.py +1 -3
  34. llama_cloud/types/cloud_google_drive_data_source.py +0 -4
  35. llama_cloud/types/cloud_jira_data_source.py +4 -6
  36. llama_cloud/types/cloud_notion_page_data_source.py +2 -6
  37. llama_cloud/types/cloud_one_drive_data_source.py +2 -6
  38. llama_cloud/types/cloud_pinecone_vector_store.py +1 -1
  39. llama_cloud/types/cloud_postgres_vector_store.py +0 -4
  40. llama_cloud/types/cloud_s_3_data_source.py +4 -12
  41. llama_cloud/types/cloud_sharepoint_data_source.py +5 -9
  42. llama_cloud/types/cloud_slack_data_source.py +6 -10
  43. llama_cloud/types/code_splitter.py +2 -1
  44. llama_cloud/types/cohere_embedding.py +6 -15
  45. llama_cloud/types/configurable_data_sink_names.py +0 -12
  46. llama_cloud/types/configurable_data_source_names.py +0 -4
  47. llama_cloud/types/configurable_transformation_names.py +0 -32
  48. llama_cloud/types/configured_transformation_item_component.py +15 -2
  49. llama_cloud/types/data_sink.py +2 -2
  50. llama_cloud/types/data_sink_component.py +15 -2
  51. llama_cloud/types/data_sink_create_component.py +15 -2
  52. llama_cloud/types/data_source.py +3 -5
  53. llama_cloud/types/data_source_component.py +23 -2
  54. llama_cloud/types/data_source_create.py +1 -3
  55. llama_cloud/types/data_source_create_component.py +23 -2
  56. llama_cloud/types/eval_dataset.py +2 -2
  57. llama_cloud/types/eval_dataset_job_record.py +7 -13
  58. llama_cloud/types/eval_execution_params_override.py +2 -6
  59. llama_cloud/types/eval_metric.py +17 -0
  60. llama_cloud/types/eval_question.py +2 -6
  61. llama_cloud/types/extraction_result.py +5 -3
  62. llama_cloud/types/extraction_schema.py +3 -5
  63. llama_cloud/types/file.py +7 -11
  64. llama_cloud/types/gemini_embedding.py +7 -22
  65. llama_cloud/types/hugging_face_inference_api_embedding.py +9 -34
  66. llama_cloud/types/input_message.py +2 -4
  67. llama_cloud/types/llama_parse_parameters.py +5 -0
  68. llama_cloud/types/llama_parse_supported_file_extensions.py +0 -4
  69. llama_cloud/types/llm.py +9 -8
  70. llama_cloud/types/llm_parameters.py +2 -7
  71. llama_cloud/types/local_eval.py +8 -10
  72. llama_cloud/types/local_eval_results.py +1 -1
  73. llama_cloud/types/managed_ingestion_status_response.py +3 -5
  74. llama_cloud/types/markdown_element_node_parser.py +4 -5
  75. llama_cloud/types/markdown_node_parser.py +2 -1
  76. llama_cloud/types/message_annotation.py +1 -6
  77. llama_cloud/types/metric_result.py +3 -3
  78. llama_cloud/types/node_parser.py +2 -1
  79. llama_cloud/types/node_relationship.py +44 -0
  80. llama_cloud/types/object_type.py +0 -4
  81. llama_cloud/types/open_ai_embedding.py +7 -36
  82. llama_cloud/types/organization.py +2 -2
  83. llama_cloud/types/page_splitter_node_parser.py +3 -2
  84. llama_cloud/types/parsing_job_json_result.py +2 -2
  85. llama_cloud/types/parsing_job_markdown_result.py +1 -1
  86. llama_cloud/types/parsing_job_text_result.py +1 -1
  87. llama_cloud/types/partition_names.py +45 -0
  88. llama_cloud/types/pipeline.py +7 -17
  89. llama_cloud/types/pipeline_configuration_hashes.py +3 -3
  90. llama_cloud/types/pipeline_create.py +6 -18
  91. llama_cloud/types/pipeline_create_embedding_config.py +15 -15
  92. llama_cloud/types/pipeline_create_transform_config.py +1 -24
  93. llama_cloud/types/pipeline_data_source.py +5 -11
  94. llama_cloud/types/pipeline_data_source_component.py +23 -2
  95. llama_cloud/types/pipeline_data_source_create.py +1 -3
  96. llama_cloud/types/pipeline_deployment.py +4 -8
  97. llama_cloud/types/pipeline_embedding_config.py +15 -15
  98. llama_cloud/types/pipeline_file.py +10 -18
  99. llama_cloud/types/pipeline_file_create.py +1 -3
  100. llama_cloud/types/playground_session.py +2 -2
  101. llama_cloud/types/preset_retrieval_params.py +8 -11
  102. llama_cloud/types/presigned_url.py +1 -3
  103. llama_cloud/types/project.py +2 -2
  104. llama_cloud/types/prompt_mixin_prompts.py +1 -1
  105. llama_cloud/types/prompt_spec.py +2 -4
  106. llama_cloud/types/related_node_info.py +0 -4
  107. llama_cloud/types/retrieval_mode.py +0 -4
  108. llama_cloud/types/sentence_splitter.py +3 -4
  109. llama_cloud/types/supported_llm_model_names.py +0 -4
  110. llama_cloud/types/text_node.py +3 -9
  111. llama_cloud/types/token_text_splitter.py +2 -1
  112. llama_cloud/types/transformation_category_names.py +0 -4
  113. llama_cloud/types/user_organization.py +5 -9
  114. llama_cloud/types/user_organization_create.py +2 -2
  115. llama_cloud/types/user_organization_delete.py +2 -2
  116. llama_cloud/types/vertex_ai_embedding_config.py +2 -2
  117. llama_cloud/types/{extend_vertex_text_embedding.py → vertex_text_embedding.py} +10 -23
  118. {llama_cloud-0.0.17.dist-info → llama_cloud-0.1.1.dist-info}/METADATA +1 -1
  119. llama_cloud-0.1.1.dist-info/RECORD +224 -0
  120. llama_cloud/resources/auth/__init__.py +0 -2
  121. llama_cloud/resources/auth/client.py +0 -124
  122. llama_cloud/resources/data_sinks/types/data_sink_update_component_one.py +0 -23
  123. llama_cloud/resources/data_sources/types/data_source_update_component_one.py +0 -27
  124. llama_cloud/types/cloud_chroma_vector_store.py +0 -43
  125. llama_cloud/types/cloud_weaviate_vector_store.py +0 -41
  126. llama_cloud/types/configured_transformation_item_component_one.py +0 -35
  127. llama_cloud/types/custom_claims.py +0 -58
  128. llama_cloud/types/data_sink_component_one.py +0 -23
  129. llama_cloud/types/data_sink_create_component_one.py +0 -23
  130. llama_cloud/types/data_source_component_one.py +0 -27
  131. llama_cloud/types/data_source_create_component_one.py +0 -27
  132. llama_cloud/types/pipeline_data_source_component_one.py +0 -27
  133. llama_cloud/types/user.py +0 -35
  134. llama_cloud-0.0.17.dist-info/RECORD +0 -235
  135. {llama_cloud-0.0.17.dist-info → llama_cloud-0.1.1.dist-info}/LICENSE +0 -0
  136. {llama_cloud-0.0.17.dist-info → llama_cloud-0.1.1.dist-info}/WHEEL +0 -0

llama_cloud/types/pipeline_create_embedding_config.py

@@ -15,8 +15,8 @@ from .open_ai_embedding_config import OpenAiEmbeddingConfig
 from .vertex_ai_embedding_config import VertexAiEmbeddingConfig
 
 
-class PipelineCreateEmbeddingConfig_OpenaiEmbedding(OpenAiEmbeddingConfig):
-    type: typing_extensions.Literal["OPENAI_EMBEDDING"]
+class PipelineCreateEmbeddingConfig_AzureEmbedding(AzureOpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["AZURE_EMBEDDING"]
 
     class Config:
         frozen = True
@@ -24,8 +24,8 @@ class PipelineCreateEmbeddingConfig_OpenaiEmbedding(OpenAiEmbeddingConfig):
         allow_population_by_field_name = True
 
 
-class PipelineCreateEmbeddingConfig_AzureEmbedding(AzureOpenAiEmbeddingConfig):
-    type: typing_extensions.Literal["AZURE_EMBEDDING"]
+class PipelineCreateEmbeddingConfig_BedrockEmbedding(BedrockEmbeddingConfig):
+    type: typing_extensions.Literal["BEDROCK_EMBEDDING"]
 
     class Config:
         frozen = True
@@ -33,8 +33,8 @@ class PipelineCreateEmbeddingConfig_AzureEmbedding(AzureOpenAiEmbeddingConfig):
         allow_population_by_field_name = True
 
 
-class PipelineCreateEmbeddingConfig_HuggingfaceApiEmbedding(HuggingFaceInferenceApiEmbeddingConfig):
-    type: typing_extensions.Literal["HUGGINGFACE_API_EMBEDDING"]
+class PipelineCreateEmbeddingConfig_CohereEmbedding(CohereEmbeddingConfig):
+    type: typing_extensions.Literal["COHERE_EMBEDDING"]
 
     class Config:
         frozen = True
@@ -42,8 +42,8 @@ class PipelineCreateEmbeddingConfig_HuggingfaceApiEmbedding(HuggingFaceInference
         allow_population_by_field_name = True
 
 
-class PipelineCreateEmbeddingConfig_BedrockEmbedding(BedrockEmbeddingConfig):
-    type: typing_extensions.Literal["BEDROCK_EMBEDDING"]
+class PipelineCreateEmbeddingConfig_GeminiEmbedding(GeminiEmbeddingConfig):
+    type: typing_extensions.Literal["GEMINI_EMBEDDING"]
 
     class Config:
         frozen = True
@@ -51,8 +51,8 @@ class PipelineCreateEmbeddingConfig_BedrockEmbedding(BedrockEmbeddingConfig):
         allow_population_by_field_name = True
 
 
-class PipelineCreateEmbeddingConfig_GeminiEmbedding(GeminiEmbeddingConfig):
-    type: typing_extensions.Literal["GEMINI_EMBEDDING"]
+class PipelineCreateEmbeddingConfig_HuggingfaceApiEmbedding(HuggingFaceInferenceApiEmbeddingConfig):
+    type: typing_extensions.Literal["HUGGINGFACE_API_EMBEDDING"]
 
     class Config:
         frozen = True
@@ -60,8 +60,8 @@ class PipelineCreateEmbeddingConfig_GeminiEmbedding(GeminiEmbeddingConfig):
         allow_population_by_field_name = True
 
 
-class PipelineCreateEmbeddingConfig_CohereEmbedding(CohereEmbeddingConfig):
-    type: typing_extensions.Literal["COHERE_EMBEDDING"]
+class PipelineCreateEmbeddingConfig_OpenaiEmbedding(OpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["OPENAI_EMBEDDING"]
 
     class Config:
         frozen = True
@@ -79,11 +79,11 @@ class PipelineCreateEmbeddingConfig_VertexaiEmbedding(VertexAiEmbeddingConfig):
 
 
 PipelineCreateEmbeddingConfig = typing.Union[
-    PipelineCreateEmbeddingConfig_OpenaiEmbedding,
     PipelineCreateEmbeddingConfig_AzureEmbedding,
-    PipelineCreateEmbeddingConfig_HuggingfaceApiEmbedding,
     PipelineCreateEmbeddingConfig_BedrockEmbedding,
-    PipelineCreateEmbeddingConfig_GeminiEmbedding,
     PipelineCreateEmbeddingConfig_CohereEmbedding,
+    PipelineCreateEmbeddingConfig_GeminiEmbedding,
+    PipelineCreateEmbeddingConfig_HuggingfaceApiEmbedding,
+    PipelineCreateEmbeddingConfig_OpenaiEmbedding,
     PipelineCreateEmbeddingConfig_VertexaiEmbedding,
 ]
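
The class-level changes above are a pure alphabetical reordering of the discriminated-union members, as the reshuffled typing.Union at the end confirms; no variant was added or removed, and the "type" discriminators are unchanged. A minimal sketch of how one such Fern-generated member behaves, with a stand-in base class since OpenAiEmbeddingConfig's fields are not shown in this diff:

import typing

import typing_extensions

try:
    import pydantic.v1 as pydantic  # pydantic 2.x installs
except ImportError:
    import pydantic  # pydantic 1.x


class OpenAiEmbeddingConfig(pydantic.BaseModel):
    model_name: typing.Optional[str] = None  # hypothetical field, for illustration only


class PipelineCreateEmbeddingConfig_OpenaiEmbedding(OpenAiEmbeddingConfig):
    type: typing_extensions.Literal["OPENAI_EMBEDDING"]

    class Config:
        frozen = True
        smart_union = True
        allow_population_by_field_name = True


# The Literal "type" field is what selects this union member when pydantic
# parses an API payload with smart_union enabled.
config = PipelineCreateEmbeddingConfig_OpenaiEmbedding(type="OPENAI_EMBEDDING")
print(config.json())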

llama_cloud/types/pipeline_create_transform_config.py

@@ -1,31 +1,8 @@
 # This file was auto-generated by Fern from our API Definition.
 
-from __future__ import annotations
-
 import typing
 
-import typing_extensions
-
 from .advanced_mode_transform_config import AdvancedModeTransformConfig
 from .auto_transform_config import AutoTransformConfig
 
-
-class PipelineCreateTransformConfig_Auto(AutoTransformConfig):
-    mode: typing_extensions.Literal["auto"]
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-
-
-class PipelineCreateTransformConfig_Advanced(AdvancedModeTransformConfig):
-    mode: typing_extensions.Literal["advanced"]
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-
-
-PipelineCreateTransformConfig = typing.Union[PipelineCreateTransformConfig_Auto, PipelineCreateTransformConfig_Advanced]
+PipelineCreateTransformConfig = typing.Union[AutoTransformConfig, AdvancedModeTransformConfig]
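
With the mode-discriminated wrapper classes deleted, PipelineCreateTransformConfig is now a plain union, so callers construct AutoTransformConfig or AdvancedModeTransformConfig directly instead of the removed _Auto/_Advanced subclasses. A hedged before/after sketch (constructor arguments are assumptions, since neither class body appears in this diff):

from llama_cloud import AutoTransformConfig

# 0.0.17 (removed): PipelineCreateTransformConfig_Auto(mode="auto")
# 0.1.1: the union member is passed as-is; no "mode" literal is required.
transform_config = AutoTransformConfig()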

llama_cloud/types/pipeline_data_source.py

@@ -23,24 +23,18 @@ class PipelineDataSource(pydantic.BaseModel):
     """
 
     id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
-    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
+    created_at: typing.Optional[dt.datetime]
+    updated_at: typing.Optional[dt.datetime]
     name: str = pydantic.Field(description="The name of the data source.")
     source_type: ConfigurableDataSourceNames
-    custom_metadata: typing.Optional[typing.Dict[str, PipelineDataSourceCustomMetadataValue]] = pydantic.Field(
-        description="Custom metadata that will be present on all data loaded from the data source"
-    )
+    custom_metadata: typing.Optional[typing.Dict[str, typing.Optional[PipelineDataSourceCustomMetadataValue]]]
     component: PipelineDataSourceComponent
     project_id: str
     data_source_id: str = pydantic.Field(description="The ID of the data source.")
     pipeline_id: str = pydantic.Field(description="The ID of the pipeline.")
     last_synced_at: dt.datetime = pydantic.Field(description="The last time the data source was automatically synced.")
-    sync_interval: typing.Optional[float] = pydantic.Field(
-        description="The interval at which the data source should be synced."
-    )
-    sync_schedule_set_by: typing.Optional[str] = pydantic.Field(
-        description="The id of the user who set the sync schedule."
-    )
+    sync_interval: typing.Optional[float]
+    sync_schedule_set_by: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
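
A pattern worth noting here and in the hunks below: the pydantic.Field(description=...) wrappers dropped from optional fields change only schema metadata, not runtime behavior, because unset optionals are already omitted from serialized payloads by the exclude_unset=True default in the overridden json() helper shown at the end of each model. A small sketch of that behavior, with the model trimmed to two fields:

import typing

try:
    import pydantic.v1 as pydantic
except ImportError:
    import pydantic


class Demo(pydantic.BaseModel):
    id: str
    sync_interval: typing.Optional[float]

    def json(self, **kwargs: typing.Any) -> str:
        # Same pattern as the generated models above.
        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
        return super().json(**kwargs_with_defaults)


print(Demo(id="ds_123").json())  # {"id": "ds_123"} -- the unset optional is omitted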

llama_cloud/types/pipeline_data_source_component.py

@@ -2,6 +2,27 @@
 
 import typing
 
-from .pipeline_data_source_component_one import PipelineDataSourceComponentOne
+from .cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
+from .cloud_box_data_source import CloudBoxDataSource
+from .cloud_confluence_data_source import CloudConfluenceDataSource
+from .cloud_google_drive_data_source import CloudGoogleDriveDataSource
+from .cloud_jira_data_source import CloudJiraDataSource
+from .cloud_notion_page_data_source import CloudNotionPageDataSource
+from .cloud_one_drive_data_source import CloudOneDriveDataSource
+from .cloud_s_3_data_source import CloudS3DataSource
+from .cloud_sharepoint_data_source import CloudSharepointDataSource
+from .cloud_slack_data_source import CloudSlackDataSource
 
-PipelineDataSourceComponent = typing.Union[typing.Dict[str, typing.Any], PipelineDataSourceComponentOne]
+PipelineDataSourceComponent = typing.Union[
+    typing.Dict[str, typing.Any],
+    CloudS3DataSource,
+    CloudAzStorageBlobDataSource,
+    CloudGoogleDriveDataSource,
+    CloudOneDriveDataSource,
+    CloudSharepointDataSource,
+    CloudSlackDataSource,
+    CloudNotionPageDataSource,
+    CloudConfluenceDataSource,
+    CloudJiraDataSource,
+    CloudBoxDataSource,
+]
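
PipelineDataSourceComponent now unions the ten concrete cloud data-source models directly, replacing the deleted PipelineDataSourceComponentOne indirection, and keeps typing.Dict[str, typing.Any] as an untyped escape hatch. A hedged usage sketch; the import paths and the bucket field on CloudS3DataSource are assumptions, not shown in this diff:

from llama_cloud import CloudS3DataSource, PipelineDataSourceComponent

# Typed arm of the union: validated by the generated pydantic model.
component: PipelineDataSourceComponent = CloudS3DataSource(bucket="my-bucket")  # field name assumed

# Dict arm of the union: accepted as-is, without model validation.
raw_component: PipelineDataSourceComponent = {"bucket": "my-bucket"}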

llama_cloud/types/pipeline_data_source_create.py

@@ -20,9 +20,7 @@ class PipelineDataSourceCreate(pydantic.BaseModel):
     """
 
     data_source_id: str = pydantic.Field(description="The ID of the data source.")
-    sync_interval: typing.Optional[float] = pydantic.Field(
-        description="The interval at which the data source should be synced."
-    )
+    sync_interval: typing.Optional[float]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/pipeline_deployment.py

@@ -16,16 +16,12 @@ except ImportError:
 
 
 class PipelineDeployment(pydantic.BaseModel):
-    """
-    Base schema model containing common database fields.
-    """
-
     id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
-    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
+    created_at: typing.Optional[dt.datetime]
+    updated_at: typing.Optional[dt.datetime]
     status: ManagedIngestionStatus = pydantic.Field(description="Status of the pipeline deployment.")
-    started_at: typing.Optional[dt.datetime] = pydantic.Field(description="Time the pipeline deployment started.")
-    ended_at: typing.Optional[dt.datetime] = pydantic.Field(description="Time the pipeline deployment finished.")
+    started_at: typing.Optional[dt.datetime]
+    ended_at: typing.Optional[dt.datetime]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/pipeline_embedding_config.py

@@ -15,8 +15,8 @@ from .open_ai_embedding_config import OpenAiEmbeddingConfig
 from .vertex_ai_embedding_config import VertexAiEmbeddingConfig
 
 
-class PipelineEmbeddingConfig_OpenaiEmbedding(OpenAiEmbeddingConfig):
-    type: typing_extensions.Literal["OPENAI_EMBEDDING"]
+class PipelineEmbeddingConfig_AzureEmbedding(AzureOpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["AZURE_EMBEDDING"]
 
     class Config:
         frozen = True
@@ -24,8 +24,8 @@ class PipelineEmbeddingConfig_OpenaiEmbedding(OpenAiEmbeddingConfig):
         allow_population_by_field_name = True
 
 
-class PipelineEmbeddingConfig_AzureEmbedding(AzureOpenAiEmbeddingConfig):
-    type: typing_extensions.Literal["AZURE_EMBEDDING"]
+class PipelineEmbeddingConfig_BedrockEmbedding(BedrockEmbeddingConfig):
+    type: typing_extensions.Literal["BEDROCK_EMBEDDING"]
 
     class Config:
         frozen = True
@@ -33,8 +33,8 @@ class PipelineEmbeddingConfig_AzureEmbedding(AzureOpenAiEmbeddingConfig):
         allow_population_by_field_name = True
 
 
-class PipelineEmbeddingConfig_HuggingfaceApiEmbedding(HuggingFaceInferenceApiEmbeddingConfig):
-    type: typing_extensions.Literal["HUGGINGFACE_API_EMBEDDING"]
+class PipelineEmbeddingConfig_CohereEmbedding(CohereEmbeddingConfig):
+    type: typing_extensions.Literal["COHERE_EMBEDDING"]
 
     class Config:
         frozen = True
@@ -42,8 +42,8 @@ class PipelineEmbeddingConfig_HuggingfaceApiEmbedding(HuggingFaceInferenceApiEmb
         allow_population_by_field_name = True
 
 
-class PipelineEmbeddingConfig_BedrockEmbedding(BedrockEmbeddingConfig):
-    type: typing_extensions.Literal["BEDROCK_EMBEDDING"]
+class PipelineEmbeddingConfig_GeminiEmbedding(GeminiEmbeddingConfig):
+    type: typing_extensions.Literal["GEMINI_EMBEDDING"]
 
     class Config:
         frozen = True
@@ -51,8 +51,8 @@ class PipelineEmbeddingConfig_BedrockEmbedding(BedrockEmbeddingConfig):
         allow_population_by_field_name = True
 
 
-class PipelineEmbeddingConfig_GeminiEmbedding(GeminiEmbeddingConfig):
-    type: typing_extensions.Literal["GEMINI_EMBEDDING"]
+class PipelineEmbeddingConfig_HuggingfaceApiEmbedding(HuggingFaceInferenceApiEmbeddingConfig):
+    type: typing_extensions.Literal["HUGGINGFACE_API_EMBEDDING"]
 
     class Config:
         frozen = True
@@ -60,8 +60,8 @@ class PipelineEmbeddingConfig_GeminiEmbedding(GeminiEmbeddingConfig):
         allow_population_by_field_name = True
 
 
-class PipelineEmbeddingConfig_CohereEmbedding(CohereEmbeddingConfig):
-    type: typing_extensions.Literal["COHERE_EMBEDDING"]
+class PipelineEmbeddingConfig_OpenaiEmbedding(OpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["OPENAI_EMBEDDING"]
 
     class Config:
         frozen = True
@@ -79,11 +79,11 @@ class PipelineEmbeddingConfig_VertexaiEmbedding(VertexAiEmbeddingConfig):
 
 
 PipelineEmbeddingConfig = typing.Union[
-    PipelineEmbeddingConfig_OpenaiEmbedding,
     PipelineEmbeddingConfig_AzureEmbedding,
-    PipelineEmbeddingConfig_HuggingfaceApiEmbedding,
     PipelineEmbeddingConfig_BedrockEmbedding,
-    PipelineEmbeddingConfig_GeminiEmbedding,
     PipelineEmbeddingConfig_CohereEmbedding,
+    PipelineEmbeddingConfig_GeminiEmbedding,
+    PipelineEmbeddingConfig_HuggingfaceApiEmbedding,
+    PipelineEmbeddingConfig_OpenaiEmbedding,
     PipelineEmbeddingConfig_VertexaiEmbedding,
 ]

llama_cloud/types/pipeline_file.py

@@ -23,27 +23,19 @@ class PipelineFile(pydantic.BaseModel):
     """
 
     id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
-    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
+    created_at: typing.Optional[dt.datetime]
+    updated_at: typing.Optional[dt.datetime]
     name: typing.Optional[str]
-    file_size: typing.Optional[int] = pydantic.Field(description="Size of the file in bytes")
-    file_type: typing.Optional[str] = pydantic.Field(description="File type (e.g. pdf, docx, etc.)")
+    file_size: typing.Optional[int]
+    file_type: typing.Optional[str]
     project_id: str = pydantic.Field(description="The ID of the project that the file belongs to")
-    last_modified_at: typing.Optional[dt.datetime] = pydantic.Field(description="The last modified time of the file")
-    resource_info: typing.Optional[typing.Dict[str, PipelineFileResourceInfoValue]] = pydantic.Field(
-        description="Resource information for the file"
-    )
-    data_source_id: typing.Optional[str] = pydantic.Field(
-        description="The ID of the data source that the file belongs to"
-    )
-    file_id: typing.Optional[str] = pydantic.Field(description="The ID of the file")
+    last_modified_at: typing.Optional[dt.datetime]
+    resource_info: typing.Optional[typing.Dict[str, typing.Optional[PipelineFileResourceInfoValue]]]
+    data_source_id: typing.Optional[str]
+    file_id: typing.Optional[str]
     pipeline_id: str = pydantic.Field(description="The ID of the pipeline that the file is associated with")
-    custom_metadata: typing.Optional[typing.Dict[str, PipelineFileCustomMetadataValue]] = pydantic.Field(
-        description="Custom metadata for the file"
-    )
-    config_hash: typing.Optional[typing.Dict[str, PipelineFileConfigHashValue]] = pydantic.Field(
-        description="Hashes for the configuration of the pipeline."
-    )
+    custom_metadata: typing.Optional[typing.Dict[str, typing.Optional[PipelineFileCustomMetadataValue]]]
+    config_hash: typing.Optional[typing.Dict[str, typing.Optional[PipelineFileConfigHashValue]]]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
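
Note the recurring type change in this release: map-valued fields such as custom_metadata go from Dict[str, X] to Dict[str, Optional[X]], so individual keys may now carry an explicit None. A hedged sketch of what that permits, with the model trimmed to the one field:

import typing

try:
    import pydantic.v1 as pydantic
except ImportError:
    import pydantic


class FileMeta(pydantic.BaseModel):
    # 0.1.1-style signature: the mapped values themselves may be None.
    custom_metadata: typing.Optional[typing.Dict[str, typing.Optional[str]]]


# Valid under the new annotation; a Dict[str, str] value type would have
# rejected the explicit None.
meta = FileMeta(custom_metadata={"author": "alice", "reviewed_by": None})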

llama_cloud/types/pipeline_file_create.py

@@ -21,9 +21,7 @@ class PipelineFileCreate(pydantic.BaseModel):
     """
 
     file_id: str = pydantic.Field(description="The ID of the file")
-    custom_metadata: typing.Optional[typing.Dict[str, PipelineFileCreateCustomMetadataValue]] = pydantic.Field(
-        description="Custom metadata for the file"
-    )
+    custom_metadata: typing.Optional[typing.Dict[str, typing.Optional[PipelineFileCreateCustomMetadataValue]]]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/playground_session.py

@@ -23,8 +23,8 @@ class PlaygroundSession(pydantic.BaseModel):
     """
 
     id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
-    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
+    created_at: typing.Optional[dt.datetime]
+    updated_at: typing.Optional[dt.datetime]
     pipeline_id: str
     user_id: str
     llm_params_id: str

llama_cloud/types/preset_retrieval_params.py

@@ -21,19 +21,16 @@ class PresetRetrievalParams(pydantic.BaseModel):
     Schema for the search params for an retrieval execution that can be preset for a pipeline.
     """
 
-    dense_similarity_top_k: typing.Optional[int] = pydantic.Field(description="Number of nodes for dense retrieval.")
-    sparse_similarity_top_k: typing.Optional[int] = pydantic.Field(description="Number of nodes for sparse retrieval.")
-    enable_reranking: typing.Optional[bool] = pydantic.Field(description="Enable reranking for retrieval")
-    rerank_top_n: typing.Optional[int] = pydantic.Field(description="Number of reranked nodes for returning.")
-    alpha: typing.Optional[float] = pydantic.Field(
-        description="Alpha value for hybrid retrieval to determine the weights between dense and sparse retrieval. 0 is sparse retrieval and 1 is dense retrieval."
-    )
-    search_filters: typing.Optional[MetadataFilters] = pydantic.Field(description="Search filters for retrieval.")
-    files_top_k: typing.Optional[int] = pydantic.Field(
-        description="Number of files to retrieve (only for retrieval mode files_via_metadata and files_via_content)."
-    )
+    dense_similarity_top_k: typing.Optional[int]
+    sparse_similarity_top_k: typing.Optional[int]
+    enable_reranking: typing.Optional[bool]
+    rerank_top_n: typing.Optional[int]
+    alpha: typing.Optional[float]
+    search_filters: typing.Optional[MetadataFilters]
+    files_top_k: typing.Optional[int]
     retrieval_mode: typing.Optional[RetrievalMode] = pydantic.Field(description="The retrieval mode for the query.")
     retrieve_image_nodes: typing.Optional[bool] = pydantic.Field(description="Whether to retrieve image nodes.")
+    class_name: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
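
The deleted descriptions still document the field semantics: alpha weights hybrid retrieval (0 is purely sparse, 1 is purely dense), and files_top_k applies only to the files_via_metadata and files_via_content retrieval modes, both of which appear in the RetrievalMode enum below. A hedged construction sketch (top-level import path assumed):

from llama_cloud import PresetRetrievalParams, RetrievalMode

params = PresetRetrievalParams(
    dense_similarity_top_k=5,
    sparse_similarity_top_k=5,
    alpha=0.5,  # 0 = sparse only, 1 = dense only, per the removed description
    enable_reranking=True,
    rerank_top_n=3,
    retrieval_mode=RetrievalMode.CHUNKS,
)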

llama_cloud/types/presigned_url.py

@@ -21,9 +21,7 @@ class PresignedUrl(pydantic.BaseModel):
 
     url: str = pydantic.Field(description="A presigned URL for IO operations against a private file")
     expires_at: dt.datetime = pydantic.Field(description="The time at which the presigned URL expires")
-    form_fields: typing.Optional[typing.Dict[str, str]] = pydantic.Field(
-        description="Form fields for a presigned POST request"
-    )
+    form_fields: typing.Optional[typing.Dict[str, typing.Optional[str]]]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/project.py

@@ -21,8 +21,8 @@ class Project(pydantic.BaseModel):
 
     name: str
     id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
-    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
+    created_at: typing.Optional[dt.datetime]
+    updated_at: typing.Optional[dt.datetime]
     ad_hoc_eval_dataset_id: typing.Optional[str]
     organization_id: str = pydantic.Field(description="The Organization ID the project is under.")
     is_default: typing.Optional[bool] = pydantic.Field(

llama_cloud/types/prompt_mixin_prompts.py

@@ -21,7 +21,7 @@ class PromptMixinPrompts(pydantic.BaseModel):
     """
 
     project_id: str = pydantic.Field(description="The ID of the project.")
-    id: typing.Optional[str] = pydantic.Field(description="The ID of the prompt set.")
+    id: typing.Optional[str]
     name: str = pydantic.Field(description="The name of the prompt set.")
     prompts: typing.List[PromptSpec] = pydantic.Field(description="The prompts.")
 

llama_cloud/types/prompt_spec.py

@@ -19,10 +19,8 @@ class PromptSpec(pydantic.BaseModel):
     prompt_key: str = pydantic.Field(description="The key of the prompt in the PromptMixin.")
     prompt_class: str = pydantic.Field(description="The class of the prompt (PromptTemplate or ChatPromptTemplate).")
     prompt_type: str = pydantic.Field(description="The type of prompt.")
-    template: typing.Optional[str] = pydantic.Field(description="The template of the prompt.")
-    message_templates: typing.Optional[typing.List[ChatMessage]] = pydantic.Field(
-        description="The chat message templates of the prompt."
-    )
+    template: typing.Optional[str]
+    message_templates: typing.Optional[typing.List[ChatMessage]]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/related_node_info.py

@@ -16,10 +16,6 @@ except ImportError:
 
 
 class RelatedNodeInfo(pydantic.BaseModel):
-    """
-    Base component object to capture class names.
-    """
-
     node_id: str
     node_type: typing.Optional[ObjectType]
     metadata: typing.Optional[typing.Dict[str, typing.Any]]

llama_cloud/types/retrieval_mode.py

@@ -7,10 +7,6 @@ T_Result = typing.TypeVar("T_Result")
 
 
 class RetrievalMode(str, enum.Enum):
-    """
-    An enumeration.
-    """
-
     CHUNKS = "chunks"
     FILES_VIA_METADATA = "files_via_metadata"
     FILES_VIA_CONTENT = "files_via_content"

llama_cloud/types/sentence_splitter.py

@@ -27,14 +27,13 @@ class SentenceSplitter(pydantic.BaseModel):
         description="Whether or not to consider metadata when splitting."
     )
     include_prev_next_rel: typing.Optional[bool] = pydantic.Field(description="Include prev/next node relationships.")
-    callback_manager: typing.Optional[typing.Dict[str, typing.Any]]
+    callback_manager: typing.Optional[typing.Any]
+    id_func: typing.Optional[str]
     chunk_size: typing.Optional[int] = pydantic.Field(description="The token chunk size for each chunk.")
     chunk_overlap: typing.Optional[int] = pydantic.Field(description="The token overlap of each chunk when splitting.")
     separator: typing.Optional[str] = pydantic.Field(description="Default separator for splitting into words")
     paragraph_separator: typing.Optional[str] = pydantic.Field(description="Separator between paragraphs.")
-    secondary_chunking_regex: typing.Optional[str] = pydantic.Field(
-        description="Backup regex for splitting into sentences."
-    )
+    secondary_chunking_regex: typing.Optional[str]
     class_name: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:
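
Besides dropping another Field description, this hunk adds an optional id_func field and widens callback_manager from Dict[str, Any] to Any; the same two changes land in TokenTextSplitter below. A hedged construction sketch using only fields visible in this hunk (import path assumed):

from llama_cloud import SentenceSplitter

splitter = SentenceSplitter(
    chunk_size=512,      # token chunk size for each chunk
    chunk_overlap=64,    # token overlap between adjacent chunks
    paragraph_separator="\n\n",
)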

llama_cloud/types/supported_llm_model_names.py

@@ -7,10 +7,6 @@ T_Result = typing.TypeVar("T_Result")
 
 
 class SupportedLlmModelNames(str, enum.Enum):
-    """
-    An enumeration.
-    """
-
     GPT_3_5_TURBO = "GPT_3_5_TURBO"
     GPT_4 = "GPT_4"
     GPT_4_TURBO = "GPT_4_TURBO"

llama_cloud/types/text_node.py

@@ -16,14 +16,8 @@ except ImportError:
 
 
 class TextNode(pydantic.BaseModel):
-    """
-    Base node Object.
-
-    Generic abstract interface for retrievable nodes
-    """
-
     id: typing.Optional[str] = pydantic.Field(alias="id_", description="Unique ID of the node.")
-    embedding: typing.Optional[typing.List[float]] = pydantic.Field(description="Embedding of the node.")
+    embedding: typing.Optional[typing.List[float]]
     extra_info: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field(
         description="A flat dictionary of metadata fields"
     )
@@ -38,8 +32,8 @@ class TextNode(pydantic.BaseModel):
     )
     text: typing.Optional[str] = pydantic.Field(description="Text content of the node.")
     mimetype: typing.Optional[str] = pydantic.Field(description="MIME type of the node content.")
-    start_char_idx: typing.Optional[int] = pydantic.Field(description="Start char index of the node.")
-    end_char_idx: typing.Optional[int] = pydantic.Field(description="End char index of the node.")
+    start_char_idx: typing.Optional[int]
+    end_char_idx: typing.Optional[int]
     text_template: typing.Optional[str] = pydantic.Field(
         description="Template for how text is formatted, with {content} and {metadata_str} placeholders."
    )
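
TextNode loses its docstring and two field descriptions, but its shape is otherwise intact: the id field is populated through the id_ alias, and start_char_idx/end_char_idx still mark the node's character span in the source text. A hedged sketch (import path assumed):

from llama_cloud import TextNode

node = TextNode(
    id_="node-1",  # populates the aliased "id" field
    text="Example chunk of text.",
    start_char_idx=0,
    end_char_idx=22,
)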

llama_cloud/types/token_text_splitter.py

@@ -23,7 +23,8 @@ class TokenTextSplitter(pydantic.BaseModel):
         description="Whether or not to consider metadata when splitting."
     )
     include_prev_next_rel: typing.Optional[bool] = pydantic.Field(description="Include prev/next node relationships.")
-    callback_manager: typing.Optional[typing.Dict[str, typing.Any]]
+    callback_manager: typing.Optional[typing.Any]
+    id_func: typing.Optional[str]
     chunk_size: typing.Optional[int] = pydantic.Field(description="The token chunk size for each chunk.")
     chunk_overlap: typing.Optional[int] = pydantic.Field(description="The token overlap of each chunk when splitting.")
     separator: typing.Optional[str] = pydantic.Field(description="Default separator for splitting into words")

llama_cloud/types/transformation_category_names.py

@@ -7,10 +7,6 @@ T_Result = typing.TypeVar("T_Result")
 
 
 class TransformationCategoryNames(str, enum.Enum):
-    """
-    An enumeration.
-    """
-
     NODE_PARSER = "NODE_PARSER"
     EMBEDDING = "EMBEDDING"
 

llama_cloud/types/user_organization.py

@@ -20,20 +20,16 @@ class UserOrganization(pydantic.BaseModel):
     """
 
     id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
-    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
+    created_at: typing.Optional[dt.datetime]
+    updated_at: typing.Optional[dt.datetime]
     email: str = pydantic.Field(description="The user's email address.")
-    user_id: typing.Optional[str] = pydantic.Field(description="The user's ID.")
+    user_id: typing.Optional[str]
     organization_id: str = pydantic.Field(description="The organization's ID.")
     pending: typing.Optional[bool] = pydantic.Field(
         description="Whether the user's membership is pending account signup."
     )
-    invited_by_user_id: typing.Optional[str] = pydantic.Field(
-        description="The user ID of the user who added the user to the organization."
-    )
-    invited_by_user_email: typing.Optional[str] = pydantic.Field(
-        description="The email address of the user who added the user to the organization."
-    )
+    invited_by_user_id: typing.Optional[str]
+    invited_by_user_email: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/user_organization_create.py

@@ -19,8 +19,8 @@ class UserOrganizationCreate(pydantic.BaseModel):
     Schema for creating a user's membership to an organization.
     """
 
-    user_id: typing.Optional[str] = pydantic.Field(description="The user's ID.")
-    email: typing.Optional[str] = pydantic.Field(description="The user's email address.")
+    user_id: typing.Optional[str]
+    email: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/user_organization_delete.py

@@ -19,8 +19,8 @@ class UserOrganizationDelete(pydantic.BaseModel):
     Schema for deleting a user's membership to an organization.
     """
 
-    user_id: typing.Optional[str] = pydantic.Field(description="The user's ID.")
-    email: typing.Optional[str] = pydantic.Field(description="The user's email address.")
+    user_id: typing.Optional[str]
+    email: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/vertex_ai_embedding_config.py

@@ -4,7 +4,7 @@ import datetime as dt
 import typing
 
 from ..core.datetime_utils import serialize_datetime
-from .extend_vertex_text_embedding import ExtendVertexTextEmbedding
+from .vertex_text_embedding import VertexTextEmbedding
 
 try:
     import pydantic
@@ -16,7 +16,7 @@ except ImportError:
 
 
 class VertexAiEmbeddingConfig(pydantic.BaseModel):
-    component: typing.Optional[ExtendVertexTextEmbedding] = pydantic.Field(
+    component: typing.Optional[VertexTextEmbedding] = pydantic.Field(
         description="Configuration for the VertexAI embedding model."
     )
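
The only substantive change in this last hunk is the rename ExtendVertexTextEmbedding → VertexTextEmbedding (file 117 in the list above), so downstream code needs a one-line import update. A hedged migration sketch; constructor arguments are omitted because the class body is not shown in this diff, and any required fields are unknown here:

# 0.0.17: from llama_cloud import ExtendVertexTextEmbedding
# 0.1.1:
from llama_cloud import VertexAiEmbeddingConfig, VertexTextEmbedding

config = VertexAiEmbeddingConfig(component=VertexTextEmbedding())  # required fields, if any, unknown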