llama-cloud 0.1.4__py3-none-any.whl → 0.1.6__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.

Potentially problematic release: this version of llama-cloud might be problematic.

Files changed (117)
  1. llama_cloud/__init__.py +76 -10
  2. llama_cloud/client.py +3 -0
  3. llama_cloud/environment.py +1 -1
  4. llama_cloud/resources/__init__.py +23 -1
  5. llama_cloud/resources/data_sinks/client.py +26 -20
  6. llama_cloud/resources/data_sources/client.py +16 -16
  7. llama_cloud/resources/embedding_model_configs/__init__.py +23 -0
  8. llama_cloud/resources/embedding_model_configs/client.py +416 -0
  9. llama_cloud/resources/embedding_model_configs/types/__init__.py +23 -0
  10. llama_cloud/resources/embedding_model_configs/types/embedding_model_config_create_embedding_config.py +89 -0
  11. llama_cloud/resources/evals/client.py +36 -26
  12. llama_cloud/resources/extraction/client.py +32 -32
  13. llama_cloud/resources/files/__init__.py +2 -2
  14. llama_cloud/resources/files/client.py +310 -54
  15. llama_cloud/resources/files/types/__init__.py +3 -1
  16. llama_cloud/resources/files/types/file_create_from_url_resource_info_value.py +7 -0
  17. llama_cloud/resources/files/types/file_create_permission_info_value.py +7 -0
  18. llama_cloud/resources/organizations/client.py +125 -56
  19. llama_cloud/resources/parsing/client.py +652 -264
  20. llama_cloud/resources/pipelines/client.py +617 -310
  21. llama_cloud/resources/projects/client.py +341 -136
  22. llama_cloud/types/__init__.py +58 -10
  23. llama_cloud/types/azure_open_ai_embedding.py +12 -6
  24. llama_cloud/types/base_prompt_template.py +6 -2
  25. llama_cloud/types/bedrock_embedding.py +12 -6
  26. llama_cloud/types/character_splitter.py +4 -2
  27. llama_cloud/types/chat_message.py +1 -1
  28. llama_cloud/types/cloud_az_storage_blob_data_source.py +16 -7
  29. llama_cloud/types/cloud_box_data_source.py +13 -6
  30. llama_cloud/types/cloud_confluence_data_source.py +7 -6
  31. llama_cloud/types/cloud_document.py +3 -1
  32. llama_cloud/types/cloud_document_create.py +3 -1
  33. llama_cloud/types/cloud_google_drive_data_source.py +1 -0
  34. llama_cloud/types/cloud_jira_data_source.py +7 -4
  35. llama_cloud/types/cloud_notion_page_data_source.py +3 -2
  36. llama_cloud/types/cloud_one_drive_data_source.py +6 -2
  37. llama_cloud/types/cloud_postgres_vector_store.py +1 -1
  38. llama_cloud/types/cloud_s_3_data_source.py +9 -4
  39. llama_cloud/types/cloud_sharepoint_data_source.py +9 -5
  40. llama_cloud/types/cloud_slack_data_source.py +7 -6
  41. llama_cloud/types/code_splitter.py +1 -1
  42. llama_cloud/types/cohere_embedding.py +7 -3
  43. llama_cloud/types/data_sink.py +4 -4
  44. llama_cloud/types/data_sink_create.py +1 -1
  45. llama_cloud/types/data_source.py +7 -5
  46. llama_cloud/types/data_source_create.py +4 -2
  47. llama_cloud/types/embedding_model_config.py +43 -0
  48. llama_cloud/types/embedding_model_config_embedding_config.py +89 -0
  49. llama_cloud/types/embedding_model_config_update.py +35 -0
  50. llama_cloud/types/embedding_model_config_update_embedding_config.py +89 -0
  51. llama_cloud/types/eval_dataset.py +2 -2
  52. llama_cloud/types/eval_dataset_job_record.py +13 -7
  53. llama_cloud/types/eval_execution_params_override.py +6 -2
  54. llama_cloud/types/eval_question.py +2 -2
  55. llama_cloud/types/extraction_result.py +2 -2
  56. llama_cloud/types/extraction_schema.py +5 -3
  57. llama_cloud/types/file.py +15 -7
  58. llama_cloud/types/file_permission_info_value.py +5 -0
  59. llama_cloud/types/filter_operator.py +2 -2
  60. llama_cloud/types/gemini_embedding.py +10 -6
  61. llama_cloud/types/hugging_face_inference_api_embedding.py +27 -11
  62. llama_cloud/types/input_message.py +3 -1
  63. llama_cloud/types/interval_usage_and_plan.py +36 -0
  64. llama_cloud/types/job_name_mapping.py +4 -0
  65. llama_cloud/types/llama_parse_parameters.py +21 -0
  66. llama_cloud/types/llm.py +4 -2
  67. llama_cloud/types/llm_parameters.py +5 -2
  68. llama_cloud/types/local_eval.py +10 -8
  69. llama_cloud/types/local_eval_results.py +1 -1
  70. llama_cloud/types/managed_ingestion_status_response.py +5 -3
  71. llama_cloud/types/markdown_element_node_parser.py +5 -3
  72. llama_cloud/types/markdown_node_parser.py +3 -2
  73. llama_cloud/types/metadata_filter.py +2 -2
  74. llama_cloud/types/metric_result.py +3 -3
  75. llama_cloud/types/node_parser.py +1 -1
  76. llama_cloud/types/open_ai_embedding.py +12 -6
  77. llama_cloud/types/organization.py +2 -2
  78. llama_cloud/types/page_splitter_node_parser.py +2 -2
  79. llama_cloud/types/paginated_list_pipeline_files_response.py +35 -0
  80. llama_cloud/types/parsing_job_structured_result.py +32 -0
  81. llama_cloud/types/permission.py +3 -3
  82. llama_cloud/types/pipeline.py +17 -6
  83. llama_cloud/types/pipeline_configuration_hashes.py +3 -3
  84. llama_cloud/types/pipeline_create.py +15 -4
  85. llama_cloud/types/pipeline_data_source.py +13 -7
  86. llama_cloud/types/pipeline_data_source_create.py +3 -1
  87. llama_cloud/types/pipeline_deployment.py +4 -4
  88. llama_cloud/types/pipeline_file.py +25 -10
  89. llama_cloud/types/pipeline_file_create.py +3 -1
  90. llama_cloud/types/pipeline_file_permission_info_value.py +7 -0
  91. llama_cloud/types/plan.py +40 -0
  92. llama_cloud/types/playground_session.py +2 -2
  93. llama_cloud/types/preset_retrieval_params.py +14 -7
  94. llama_cloud/types/presigned_url.py +3 -1
  95. llama_cloud/types/project.py +2 -2
  96. llama_cloud/types/prompt_mixin_prompts.py +1 -1
  97. llama_cloud/types/prompt_spec.py +4 -2
  98. llama_cloud/types/role.py +3 -3
  99. llama_cloud/types/sentence_splitter.py +4 -2
  100. llama_cloud/types/text_node.py +3 -3
  101. llama_cloud/types/{hugging_face_inference_api_embedding_token.py → token.py} +1 -1
  102. llama_cloud/types/token_text_splitter.py +1 -1
  103. llama_cloud/types/usage.py +41 -0
  104. llama_cloud/types/user_organization.py +9 -5
  105. llama_cloud/types/user_organization_create.py +4 -4
  106. llama_cloud/types/user_organization_delete.py +2 -2
  107. llama_cloud/types/user_organization_role.py +2 -2
  108. llama_cloud/types/value.py +5 -0
  109. llama_cloud/types/vertex_text_embedding.py +9 -5
  110. {llama_cloud-0.1.4.dist-info → llama_cloud-0.1.6.dist-info}/METADATA +1 -1
  111. {llama_cloud-0.1.4.dist-info → llama_cloud-0.1.6.dist-info}/RECORD +113 -99
  112. llama_cloud/types/data_sink_component.py +0 -20
  113. llama_cloud/types/data_source_component.py +0 -28
  114. llama_cloud/types/metadata_filter_value.py +0 -5
  115. llama_cloud/types/pipeline_data_source_component.py +0 -28
  116. {llama_cloud-0.1.4.dist-info → llama_cloud-0.1.6.dist-info}/LICENSE +0 -0
  117. {llama_cloud-0.1.4.dist-info → llama_cloud-0.1.6.dist-info}/WHEEL +0 -0
llama_cloud/types/data_sink.py CHANGED
@@ -5,7 +5,7 @@ import typing
 
 from ..core.datetime_utils import serialize_datetime
 from .configurable_data_sink_names import ConfigurableDataSinkNames
-from .data_sink_component import DataSinkComponent
+from .data_sink_create_component import DataSinkCreateComponent
 
 try:
     import pydantic
@@ -22,11 +22,11 @@ class DataSink(pydantic.BaseModel):
     """
 
     id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime]
-    updated_at: typing.Optional[dt.datetime]
+    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
+    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
     name: str = pydantic.Field(description="The name of the data sink.")
     sink_type: ConfigurableDataSinkNames
-    component: DataSinkComponent
+    component: DataSinkCreateComponent = pydantic.Field(description="Component that implements the data sink")
     project_id: str
 
     def json(self, **kwargs: typing.Any) -> str:
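
Note: every hunk in this release carries the same Fern-generated pydantic compatibility shim. On pydantic 2.x it aliases pydantic.v1 to pydantic, so the models always run against the v1 API whichever major version is installed. A minimal standalone sketch of the idea (illustrative only, not llama-cloud code):

    try:
        import pydantic

        if pydantic.__version__.startswith("1."):
            raise ImportError  # v1 is installed: fall through to the plain import
        import pydantic.v1 as pydantic  # type: ignore  # v2: use its bundled v1 API
    except ImportError:
        import pydantic  # type: ignore


    class Example(pydantic.BaseModel):  # always the v1 BaseModel API
        name: str


    print(Example(name="demo").json())  # works under pydantic 1.x and 2.x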

llama_cloud/types/data_sink_create.py CHANGED
@@ -23,7 +23,7 @@ class DataSinkCreate(pydantic.BaseModel):
 
     name: str = pydantic.Field(description="The name of the data sink.")
     sink_type: ConfigurableDataSinkNames
-    component: DataSinkCreateComponent
+    component: DataSinkCreateComponent = pydantic.Field(description="Component that implements the data sink")
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/data_source.py CHANGED
@@ -5,7 +5,7 @@ import typing
 
 from ..core.datetime_utils import serialize_datetime
 from .configurable_data_source_names import ConfigurableDataSourceNames
-from .data_source_component import DataSourceComponent
+from .data_source_create_component import DataSourceCreateComponent
 from .data_source_custom_metadata_value import DataSourceCustomMetadataValue
 
 try:
@@ -23,12 +23,14 @@ class DataSource(pydantic.BaseModel):
     """
 
     id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime]
-    updated_at: typing.Optional[dt.datetime]
+    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
+    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
     name: str = pydantic.Field(description="The name of the data source.")
     source_type: ConfigurableDataSourceNames
-    custom_metadata: typing.Optional[typing.Dict[str, typing.Optional[DataSourceCustomMetadataValue]]]
-    component: DataSourceComponent
+    custom_metadata: typing.Optional[typing.Dict[str, typing.Optional[DataSourceCustomMetadataValue]]] = pydantic.Field(
+        description="Custom metadata that will be present on all data loaded from the data source"
+    )
+    component: DataSourceCreateComponent = pydantic.Field(description="Component that implements the data source")
     project_id: str
 
     def json(self, **kwargs: typing.Any) -> str:

llama_cloud/types/data_source_create.py CHANGED
@@ -24,8 +24,10 @@ class DataSourceCreate(pydantic.BaseModel):
 
     name: str = pydantic.Field(description="The name of the data source.")
     source_type: ConfigurableDataSourceNames
-    custom_metadata: typing.Optional[typing.Dict[str, typing.Optional[DataSourceCreateCustomMetadataValue]]]
-    component: DataSourceCreateComponent
+    custom_metadata: typing.Optional[
+        typing.Dict[str, typing.Optional[DataSourceCreateCustomMetadataValue]]
+    ] = pydantic.Field(description="Custom metadata that will be present on all data loaded from the data source")
+    component: DataSourceCreateComponent = pydantic.Field(description="Component that implements the data source")
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/embedding_model_config.py ADDED
@@ -0,0 +1,43 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .embedding_model_config_embedding_config import EmbeddingModelConfigEmbeddingConfig
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class EmbeddingModelConfig(pydantic.BaseModel):
+    """
+    Schema for an embedding model config.
+    """
+
+    id: str = pydantic.Field(description="Unique identifier")
+    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
+    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
+    name: str = pydantic.Field(description="The name of the embedding model config.")
+    embedding_config: EmbeddingModelConfigEmbeddingConfig = pydantic.Field(
+        description="The embedding configuration for the embedding model config."
+    )
+    project_id: str
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
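
Note: like the other generated models, EmbeddingModelConfig sets Config.frozen = True, so instances are immutable and hashable under pydantic v1 semantics. A standalone sketch of that behavior (not llama-cloud code):

    import pydantic  # assumes the pydantic v1 API, as in the generated models


    class FrozenExample(pydantic.BaseModel):
        name: str

        class Config:
            frozen = True


    obj = FrozenExample(name="a")
    try:
        obj.name = "b"  # rejected: frozen models do not allow mutation
    except TypeError as err:
        print(err)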

llama_cloud/types/embedding_model_config_embedding_config.py ADDED
@@ -0,0 +1,89 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import typing_extensions
+
+from .azure_open_ai_embedding_config import AzureOpenAiEmbeddingConfig
+from .bedrock_embedding_config import BedrockEmbeddingConfig
+from .cohere_embedding_config import CohereEmbeddingConfig
+from .gemini_embedding_config import GeminiEmbeddingConfig
+from .hugging_face_inference_api_embedding_config import HuggingFaceInferenceApiEmbeddingConfig
+from .open_ai_embedding_config import OpenAiEmbeddingConfig
+from .vertex_ai_embedding_config import VertexAiEmbeddingConfig
+
+
+class EmbeddingModelConfigEmbeddingConfig_AzureEmbedding(AzureOpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["AZURE_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigEmbeddingConfig_BedrockEmbedding(BedrockEmbeddingConfig):
+    type: typing_extensions.Literal["BEDROCK_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigEmbeddingConfig_CohereEmbedding(CohereEmbeddingConfig):
+    type: typing_extensions.Literal["COHERE_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigEmbeddingConfig_GeminiEmbedding(GeminiEmbeddingConfig):
+    type: typing_extensions.Literal["GEMINI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigEmbeddingConfig_HuggingfaceApiEmbedding(HuggingFaceInferenceApiEmbeddingConfig):
+    type: typing_extensions.Literal["HUGGINGFACE_API_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigEmbeddingConfig_OpenaiEmbedding(OpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["OPENAI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigEmbeddingConfig_VertexaiEmbedding(VertexAiEmbeddingConfig):
+    type: typing_extensions.Literal["VERTEXAI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+EmbeddingModelConfigEmbeddingConfig = typing.Union[
+    EmbeddingModelConfigEmbeddingConfig_AzureEmbedding,
+    EmbeddingModelConfigEmbeddingConfig_BedrockEmbedding,
+    EmbeddingModelConfigEmbeddingConfig_CohereEmbedding,
+    EmbeddingModelConfigEmbeddingConfig_GeminiEmbedding,
+    EmbeddingModelConfigEmbeddingConfig_HuggingfaceApiEmbedding,
+    EmbeddingModelConfigEmbeddingConfig_OpenaiEmbedding,
+    EmbeddingModelConfigEmbeddingConfig_VertexaiEmbedding,
+]
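
Note: this is a tagged union. Each variant pins a typing_extensions.Literal on its type field, and pydantic (with smart_union) selects the variant whose literal matches the incoming payload. A standalone sketch of the pattern with hypothetical payload fields, not the real config classes:

    import typing

    import pydantic  # assumes the pydantic v1 API
    import typing_extensions


    class OpenAiLike(pydantic.BaseModel):
        type: typing_extensions.Literal["OPENAI_EMBEDDING"]
        model: str


    class CohereLike(pydantic.BaseModel):
        type: typing_extensions.Literal["COHERE_EMBEDDING"]
        model: str


    EmbeddingConfigSketch = typing.Union[OpenAiLike, CohereLike]

    # The "type" literal decides which variant validates.
    cfg = pydantic.parse_obj_as(EmbeddingConfigSketch, {"type": "COHERE_EMBEDDING", "model": "embed-v3"})
    assert isinstance(cfg, CohereLike)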

llama_cloud/types/embedding_model_config_update.py ADDED
@@ -0,0 +1,35 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .embedding_model_config_update_embedding_config import EmbeddingModelConfigUpdateEmbeddingConfig
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class EmbeddingModelConfigUpdate(pydantic.BaseModel):
+    name: typing.Optional[str] = pydantic.Field(description="The name of the embedding model config.")
+    embedding_config: typing.Optional[EmbeddingModelConfigUpdateEmbeddingConfig] = pydantic.Field(
+        description="The embedding configuration for the embedding model config."
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
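
Note: every field on EmbeddingModelConfigUpdate is optional, and json()/dict() default to exclude_unset=True, so an update payload carries only the fields that were explicitly set. A short sketch (the name value is hypothetical):

    from llama_cloud.types.embedding_model_config_update import EmbeddingModelConfigUpdate

    patch = EmbeddingModelConfigUpdate(name="renamed-config")
    print(patch.json())  # {"name": "renamed-config"}; embedding_config is omitted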

llama_cloud/types/embedding_model_config_update_embedding_config.py ADDED
@@ -0,0 +1,89 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+import typing_extensions
+
+from .azure_open_ai_embedding_config import AzureOpenAiEmbeddingConfig
+from .bedrock_embedding_config import BedrockEmbeddingConfig
+from .cohere_embedding_config import CohereEmbeddingConfig
+from .gemini_embedding_config import GeminiEmbeddingConfig
+from .hugging_face_inference_api_embedding_config import HuggingFaceInferenceApiEmbeddingConfig
+from .open_ai_embedding_config import OpenAiEmbeddingConfig
+from .vertex_ai_embedding_config import VertexAiEmbeddingConfig
+
+
+class EmbeddingModelConfigUpdateEmbeddingConfig_AzureEmbedding(AzureOpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["AZURE_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigUpdateEmbeddingConfig_BedrockEmbedding(BedrockEmbeddingConfig):
+    type: typing_extensions.Literal["BEDROCK_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigUpdateEmbeddingConfig_CohereEmbedding(CohereEmbeddingConfig):
+    type: typing_extensions.Literal["COHERE_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigUpdateEmbeddingConfig_GeminiEmbedding(GeminiEmbeddingConfig):
+    type: typing_extensions.Literal["GEMINI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigUpdateEmbeddingConfig_HuggingfaceApiEmbedding(HuggingFaceInferenceApiEmbeddingConfig):
+    type: typing_extensions.Literal["HUGGINGFACE_API_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigUpdateEmbeddingConfig_OpenaiEmbedding(OpenAiEmbeddingConfig):
+    type: typing_extensions.Literal["OPENAI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+class EmbeddingModelConfigUpdateEmbeddingConfig_VertexaiEmbedding(VertexAiEmbeddingConfig):
+    type: typing_extensions.Literal["VERTEXAI_EMBEDDING"]
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+
+
+EmbeddingModelConfigUpdateEmbeddingConfig = typing.Union[
+    EmbeddingModelConfigUpdateEmbeddingConfig_AzureEmbedding,
+    EmbeddingModelConfigUpdateEmbeddingConfig_BedrockEmbedding,
+    EmbeddingModelConfigUpdateEmbeddingConfig_CohereEmbedding,
+    EmbeddingModelConfigUpdateEmbeddingConfig_GeminiEmbedding,
+    EmbeddingModelConfigUpdateEmbeddingConfig_HuggingfaceApiEmbedding,
+    EmbeddingModelConfigUpdateEmbeddingConfig_OpenaiEmbedding,
+    EmbeddingModelConfigUpdateEmbeddingConfig_VertexaiEmbedding,
+]

llama_cloud/types/eval_dataset.py CHANGED
@@ -21,8 +21,8 @@ class EvalDataset(pydantic.BaseModel):
     """
 
     id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime]
-    updated_at: typing.Optional[dt.datetime]
+    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
+    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
     name: str = pydantic.Field(description="The name of the EvalDataset.")
     project_id: str
 

llama_cloud/types/eval_dataset_job_record.py CHANGED
@@ -28,21 +28,27 @@ class EvalDatasetJobRecord(pydantic.BaseModel):
     partitions: typing.Dict[str, str] = pydantic.Field(
         description="The partitions for this execution. Used for determining where to save job output."
     )
-    parameters: typing.Optional[EvalDatasetJobParams]
-    session_id: typing.Optional[str]
-    correlation_id: typing.Optional[str]
-    parent_job_execution_id: typing.Optional[str]
-    user_id: typing.Optional[str]
+    parameters: typing.Optional[EvalDatasetJobParams] = pydantic.Field(
+        description="Additional input parameters for the eval execution."
+    )
+    session_id: typing.Optional[str] = pydantic.Field(
+        description="The upstream request ID that created this job. Used for tracking the job across services."
+    )
+    correlation_id: typing.Optional[str] = pydantic.Field(
+        description="The correlation ID for this job. Used for tracking the job across services."
+    )
+    parent_job_execution_id: typing.Optional[str] = pydantic.Field(description="The ID of the parent job execution.")
+    user_id: typing.Optional[str] = pydantic.Field(description="The ID of the user that created this job")
     created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
     id: typing.Optional[str] = pydantic.Field(description="Unique identifier")
     status: StatusEnum
     error_code: typing.Optional[str]
     error_message: typing.Optional[str]
-    attempts: typing.Optional[int]
+    attempts: typing.Optional[int] = pydantic.Field(description="The number of times this job has been attempted")
     started_at: typing.Optional[dt.datetime]
     ended_at: typing.Optional[dt.datetime]
     updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
-    data: typing.Optional[Base]
+    data: typing.Optional[Base] = pydantic.Field(description="Additional metadata for the job execution.")
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/eval_execution_params_override.py CHANGED
@@ -20,8 +20,12 @@ class EvalExecutionParamsOverride(pydantic.BaseModel):
     Schema for the params override for an eval execution.
     """
 
-    llm_model: typing.Optional[SupportedLlmModelNames]
-    qa_prompt_tmpl: typing.Optional[str]
+    llm_model: typing.Optional[SupportedLlmModelNames] = pydantic.Field(
+        description="The LLM model to use within eval execution."
+    )
+    qa_prompt_tmpl: typing.Optional[str] = pydantic.Field(
+        description="The template to use for the question answering prompt."
+    )
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/eval_question.py CHANGED
@@ -16,8 +16,8 @@ except ImportError:
 
 class EvalQuestion(pydantic.BaseModel):
     id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime]
-    updated_at: typing.Optional[dt.datetime]
+    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
+    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
     content: str = pydantic.Field(description="The content of the question.")
     eval_dataset_id: str
     eval_dataset_index: int = pydantic.Field(

llama_cloud/types/extraction_result.py CHANGED
@@ -22,8 +22,8 @@ class ExtractionResult(pydantic.BaseModel):
     """
 
     id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime]
-    updated_at: typing.Optional[dt.datetime]
+    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
+    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
     schema_id: str = pydantic.Field(description="The id of the schema")
     data: typing.Dict[str, typing.Optional[ExtractionResultDataValue]] = pydantic.Field(
         description="The data extracted from the file"

llama_cloud/types/extraction_schema.py CHANGED
@@ -21,11 +21,13 @@ class ExtractionSchema(pydantic.BaseModel):
     """
 
     id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime]
-    updated_at: typing.Optional[dt.datetime]
+    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
+    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
     name: str = pydantic.Field(description="The name of the extraction schema")
     project_id: str = pydantic.Field(description="The ID of the project that the extraction schema belongs to")
-    data_schema: typing.Optional[typing.Dict[str, typing.Optional[ExtractionSchemaDataSchemaValue]]]
+    data_schema: typing.Optional[typing.Dict[str, typing.Optional[ExtractionSchemaDataSchemaValue]]] = pydantic.Field(
+        description="The schema of the data"
+    )
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/file.py CHANGED
@@ -4,6 +4,7 @@ import datetime as dt
 import typing
 
 from ..core.datetime_utils import serialize_datetime
+from .file_permission_info_value import FilePermissionInfoValue
 from .file_resource_info_value import FileResourceInfoValue
 
 try:
@@ -21,15 +22,22 @@ class File(pydantic.BaseModel):
     """
 
     id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime]
-    updated_at: typing.Optional[dt.datetime]
+    created_at: typing.Optional[dt.datetime] = pydantic.Field(description="Creation datetime")
+    updated_at: typing.Optional[dt.datetime] = pydantic.Field(description="Update datetime")
     name: str
-    file_size: typing.Optional[int]
-    file_type: typing.Optional[str]
+    file_size: typing.Optional[int] = pydantic.Field(description="Size of the file in bytes")
+    file_type: typing.Optional[str] = pydantic.Field(description="File type (e.g. pdf, docx, etc.)")
     project_id: str = pydantic.Field(description="The ID of the project that the file belongs to")
-    last_modified_at: typing.Optional[dt.datetime]
-    resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileResourceInfoValue]]]
-    data_source_id: typing.Optional[str]
+    last_modified_at: typing.Optional[dt.datetime] = pydantic.Field(description="The last modified time of the file")
+    resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileResourceInfoValue]]] = pydantic.Field(
+        description="Resource information for the file"
+    )
+    permission_info: typing.Optional[typing.Dict[str, typing.Optional[FilePermissionInfoValue]]] = pydantic.Field(
+        description="Permission information for the file"
+    )
+    data_source_id: typing.Optional[str] = pydantic.Field(
+        description="The ID of the data source that the file belongs to"
+    )
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}

llama_cloud/types/file_permission_info_value.py ADDED
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+FilePermissionInfoValue = typing.Union[typing.Dict[str, typing.Any], typing.List[typing.Any], str, int, float, bool]
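
Note: permission_info joins resource_info as a free-form mapping, and FilePermissionInfoValue accepts dicts, lists, strings, numbers, and booleans. A sketch with hypothetical values (per the model above, only id, name, and project_id are required):

    from llama_cloud.types.file import File

    f = File(
        id="file_123",  # hypothetical identifiers
        name="report.pdf",
        project_id="proj_456",
        permission_info={"allowed_users": ["alice@example.com"]},
    )
    print(f.json())  # unset optional fields are excluded from the output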

llama_cloud/types/filter_operator.py CHANGED
@@ -36,7 +36,7 @@ class FilterOperator(str, enum.Enum):
         in_: typing.Callable[[], T_Result],
         nin: typing.Callable[[], T_Result],
         any: typing.Callable[[], T_Result],
-        all: typing.Callable[[], T_Result],
+        all_: typing.Callable[[], T_Result],
         text_match: typing.Callable[[], T_Result],
         contains: typing.Callable[[], T_Result],
         is_empty: typing.Callable[[], T_Result],
@@ -60,7 +60,7 @@ class FilterOperator(str, enum.Enum):
         if self is FilterOperator.ANY:
             return any()
         if self is FilterOperator.ALL:
-            return all()
+            return all_()
         if self is FilterOperator.TEXT_MATCH:
             return text_match()
         if self is FilterOperator.CONTAINS:
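
Note: the rename fixes a visit() callback that shadowed the built-in all, matching the existing in_ convention for the in keyword; callers passing all= must switch to all_=. A standalone sketch of the visitor pattern (not the full FilterOperator signature):

    import enum
    import typing

    T = typing.TypeVar("T")


    class Op(str, enum.Enum):
        ANY = "any"
        ALL = "all"

        def visit(self, any: typing.Callable[[], T], all_: typing.Callable[[], T]) -> T:
            if self is Op.ANY:
                return any()
            return all_()


    print(Op.ALL.visit(any=lambda: "matched any", all_=lambda: "matched all"))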

llama_cloud/types/gemini_embedding.py CHANGED
@@ -17,12 +17,16 @@ except ImportError:
 class GeminiEmbedding(pydantic.BaseModel):
     model_name: typing.Optional[str] = pydantic.Field(description="The modelId of the Gemini model to use.")
     embed_batch_size: typing.Optional[int] = pydantic.Field(description="The batch size for embedding calls.")
-    num_workers: typing.Optional[int]
-    title: typing.Optional[str]
-    task_type: typing.Optional[str]
-    api_key: typing.Optional[str]
-    api_base: typing.Optional[str]
-    transport: typing.Optional[str]
+    num_workers: typing.Optional[int] = pydantic.Field(
+        description="The number of workers to use for async embedding calls."
+    )
+    title: typing.Optional[str] = pydantic.Field(
+        description="Title is only applicable for retrieval_document tasks, and is used to represent a document title. For other tasks, title is invalid."
+    )
+    task_type: typing.Optional[str] = pydantic.Field(description="The task for embedding model.")
+    api_key: typing.Optional[str] = pydantic.Field(description="API key to access the model. Defaults to None.")
+    api_base: typing.Optional[str] = pydantic.Field(description="API base to access the model. Defaults to None.")
+    transport: typing.Optional[str] = pydantic.Field(description="Transport to access the model. Defaults to None.")
     class_name: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:

llama_cloud/types/hugging_face_inference_api_embedding.py CHANGED
@@ -4,8 +4,8 @@ import datetime as dt
 import typing
 
 from ..core.datetime_utils import serialize_datetime
-from .hugging_face_inference_api_embedding_token import HuggingFaceInferenceApiEmbeddingToken
 from .pooling import Pooling
+from .token import Token
 
 try:
     import pydantic
@@ -17,19 +17,35 @@ except ImportError:
 
 
 class HuggingFaceInferenceApiEmbedding(pydantic.BaseModel):
-    model_name: typing.Optional[str]
+    model_name: typing.Optional[str] = pydantic.Field(
+        description="Hugging Face model name. If None, the task will be used."
+    )
     embed_batch_size: typing.Optional[int] = pydantic.Field(description="The batch size for embedding calls.")
-    num_workers: typing.Optional[int]
-    pooling: typing.Optional[Pooling]
-    query_instruction: typing.Optional[str]
-    text_instruction: typing.Optional[str]
-    token: typing.Optional[HuggingFaceInferenceApiEmbeddingToken] = pydantic.Field(
+    num_workers: typing.Optional[int] = pydantic.Field(
+        description="The number of workers to use for async embedding calls."
+    )
+    pooling: typing.Optional[Pooling] = pydantic.Field(
+        description="Pooling strategy. If None, the model's default pooling is used."
+    )
+    query_instruction: typing.Optional[str] = pydantic.Field(
+        description="Instruction to prepend during query embedding."
+    )
+    text_instruction: typing.Optional[str] = pydantic.Field(description="Instruction to prepend during text embedding.")
+    token: typing.Optional[Token] = pydantic.Field(
         description="Hugging Face token. Will default to the locally saved token. Pass token=False if you don’t want to send your token to the server."
     )
-    timeout: typing.Optional[float]
-    headers: typing.Optional[typing.Dict[str, typing.Optional[str]]]
-    cookies: typing.Optional[typing.Dict[str, typing.Optional[str]]]
-    task: typing.Optional[str]
+    timeout: typing.Optional[float] = pydantic.Field(
+        description="The maximum number of seconds to wait for a response from the server. Loading a new model in Inference API can take up to several minutes. Defaults to None, meaning it will loop until the server is available."
+    )
+    headers: typing.Optional[typing.Dict[str, typing.Optional[str]]] = pydantic.Field(
+        description="Additional headers to send to the server. By default only the authorization and user-agent headers are sent. Values in this dictionary will override the default values."
+    )
+    cookies: typing.Optional[typing.Dict[str, typing.Optional[str]]] = pydantic.Field(
+        description="Additional cookies to send to the server."
+    )
+    task: typing.Optional[str] = pydantic.Field(
+        description="Optional task to pick Hugging Face's recommended model, used when model_name is left as default of None."
+    )
     class_name: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:
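
Note: token now uses the renamed Token type, so imports of the old hugging_face_inference_api_embedding_token module will break. Per the field description, token=False keeps a locally saved Hugging Face token from being sent to the server. A sketch with a hypothetical model name:

    from llama_cloud.types.hugging_face_inference_api_embedding import HuggingFaceInferenceApiEmbedding

    embedding = HuggingFaceInferenceApiEmbedding(
        model_name="sentence-transformers/all-MiniLM-L6-v2",  # hypothetical model
        token=False,  # do not send the locally saved token
    )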

llama_cloud/types/input_message.py CHANGED
@@ -23,7 +23,9 @@ class InputMessage(pydantic.BaseModel):
     id: str = pydantic.Field(description="ID of the message, if any. a UUID.")
     role: MessageRole
     content: str
-    data: typing.Optional[typing.Dict[str, typing.Any]]
+    data: typing.Optional[typing.Dict[str, typing.Any]] = pydantic.Field(
+        description="Additional data to be stored with the message."
+    )
     class_name: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:

llama_cloud/types/interval_usage_and_plan.py ADDED
@@ -0,0 +1,36 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .plan import Plan
+from .usage import Usage
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+
+class IntervalUsageAndPlan(pydantic.BaseModel):
+    start_window: typing.Optional[dt.datetime]
+    end_window: typing.Optional[dt.datetime]
+    plan: typing.Optional[Plan]
+    usage: typing.Optional[Usage]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
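
Note: all four fields are optional, so combined with the exclude_unset default an untouched instance serializes to an empty JSON object. A quick sketch:

    from llama_cloud.types.interval_usage_and_plan import IntervalUsageAndPlan

    print(IntervalUsageAndPlan().json())  # {}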

llama_cloud/types/job_name_mapping.py CHANGED
@@ -18,6 +18,7 @@ class JobNameMapping(str, enum.Enum):
     PARSE = "PARSE"
     TRANSFORM = "TRANSFORM"
     INGESTION = "INGESTION"
+    METADATA_UPDATE = "METADATA_UPDATE"
 
     def visit(
         self,
@@ -28,6 +29,7 @@ class JobNameMapping(str, enum.Enum):
         parse: typing.Callable[[], T_Result],
         transform: typing.Callable[[], T_Result],
         ingestion: typing.Callable[[], T_Result],
+        metadata_update: typing.Callable[[], T_Result],
     ) -> T_Result:
         if self is JobNameMapping.MANAGED_INGESTION:
             return managed_ingestion()
@@ -43,3 +45,5 @@ class JobNameMapping(str, enum.Enum):
             return transform()
         if self is JobNameMapping.INGESTION:
             return ingestion()
+        if self is JobNameMapping.METADATA_UPDATE:
+            return metadata_update()
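
Note: visit() is exhaustive over the enum, so existing callers must now supply a metadata_update callback as well (the full callback list is longer than the portion shown in this hunk). The new member behaves like any other str-valued enum member:

    from llama_cloud.types.job_name_mapping import JobNameMapping

    assert JobNameMapping("METADATA_UPDATE") is JobNameMapping.METADATA_UPDATE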