llama-cloud 0.0.1 (llama_cloud-0.0.1-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of llama-cloud might be problematic.

Files changed (173)
  1. llama_cloud/__init__.py +295 -0
  2. llama_cloud/client.py +72 -0
  3. llama_cloud/core/__init__.py +17 -0
  4. llama_cloud/core/api_error.py +15 -0
  5. llama_cloud/core/client_wrapper.py +51 -0
  6. llama_cloud/core/datetime_utils.py +28 -0
  7. llama_cloud/core/jsonable_encoder.py +103 -0
  8. llama_cloud/core/remove_none_from_dict.py +11 -0
  9. llama_cloud/errors/__init__.py +5 -0
  10. llama_cloud/errors/unprocessable_entity_error.py +9 -0
  11. llama_cloud/resources/__init__.py +40 -0
  12. llama_cloud/resources/api_keys/__init__.py +2 -0
  13. llama_cloud/resources/api_keys/client.py +302 -0
  14. llama_cloud/resources/billing/__init__.py +2 -0
  15. llama_cloud/resources/billing/client.py +234 -0
  16. llama_cloud/resources/component_definitions/__init__.py +2 -0
  17. llama_cloud/resources/component_definitions/client.py +192 -0
  18. llama_cloud/resources/data_sinks/__init__.py +5 -0
  19. llama_cloud/resources/data_sinks/client.py +506 -0
  20. llama_cloud/resources/data_sinks/types/__init__.py +6 -0
  21. llama_cloud/resources/data_sinks/types/data_sink_update_component.py +7 -0
  22. llama_cloud/resources/data_sinks/types/data_sink_update_component_one.py +17 -0
  23. llama_cloud/resources/data_sources/__init__.py +5 -0
  24. llama_cloud/resources/data_sources/client.py +521 -0
  25. llama_cloud/resources/data_sources/types/__init__.py +7 -0
  26. llama_cloud/resources/data_sources/types/data_source_update_component.py +7 -0
  27. llama_cloud/resources/data_sources/types/data_source_update_component_one.py +19 -0
  28. llama_cloud/resources/data_sources/types/data_source_update_custom_metadata_value.py +7 -0
  29. llama_cloud/resources/deprecated/__init__.py +2 -0
  30. llama_cloud/resources/deprecated/client.py +982 -0
  31. llama_cloud/resources/evals/__init__.py +2 -0
  32. llama_cloud/resources/evals/client.py +745 -0
  33. llama_cloud/resources/files/__init__.py +5 -0
  34. llama_cloud/resources/files/client.py +560 -0
  35. llama_cloud/resources/files/types/__init__.py +5 -0
  36. llama_cloud/resources/files/types/file_create_resource_info_value.py +5 -0
  37. llama_cloud/resources/parsing/__init__.py +2 -0
  38. llama_cloud/resources/parsing/client.py +982 -0
  39. llama_cloud/resources/pipelines/__init__.py +5 -0
  40. llama_cloud/resources/pipelines/client.py +2599 -0
  41. llama_cloud/resources/pipelines/types/__init__.py +5 -0
  42. llama_cloud/resources/pipelines/types/pipeline_file_update_custom_metadata_value.py +7 -0
  43. llama_cloud/resources/projects/__init__.py +2 -0
  44. llama_cloud/resources/projects/client.py +1231 -0
  45. llama_cloud/types/__init__.py +253 -0
  46. llama_cloud/types/api_key.py +37 -0
  47. llama_cloud/types/azure_open_ai_embedding.py +75 -0
  48. llama_cloud/types/base.py +26 -0
  49. llama_cloud/types/base_prompt_template.py +44 -0
  50. llama_cloud/types/bedrock_embedding.py +56 -0
  51. llama_cloud/types/chat_message.py +35 -0
  52. llama_cloud/types/cloud_az_storage_blob_data_source.py +40 -0
  53. llama_cloud/types/cloud_chroma_vector_store.py +40 -0
  54. llama_cloud/types/cloud_document.py +36 -0
  55. llama_cloud/types/cloud_document_create.py +36 -0
  56. llama_cloud/types/cloud_gcs_data_source.py +37 -0
  57. llama_cloud/types/cloud_google_drive_data_source.py +36 -0
  58. llama_cloud/types/cloud_one_drive_data_source.py +38 -0
  59. llama_cloud/types/cloud_pinecone_vector_store.py +46 -0
  60. llama_cloud/types/cloud_postgres_vector_store.py +44 -0
  61. llama_cloud/types/cloud_qdrant_vector_store.py +48 -0
  62. llama_cloud/types/cloud_s_3_data_source.py +42 -0
  63. llama_cloud/types/cloud_sharepoint_data_source.py +38 -0
  64. llama_cloud/types/cloud_weaviate_vector_store.py +38 -0
  65. llama_cloud/types/code_splitter.py +46 -0
  66. llama_cloud/types/cohere_embedding.py +46 -0
  67. llama_cloud/types/configurable_data_sink_names.py +37 -0
  68. llama_cloud/types/configurable_data_source_names.py +41 -0
  69. llama_cloud/types/configurable_transformation_definition.py +45 -0
  70. llama_cloud/types/configurable_transformation_names.py +73 -0
  71. llama_cloud/types/configured_transformation_item.py +43 -0
  72. llama_cloud/types/configured_transformation_item_component.py +9 -0
  73. llama_cloud/types/configured_transformation_item_component_one.py +35 -0
  74. llama_cloud/types/data_sink.py +40 -0
  75. llama_cloud/types/data_sink_component.py +7 -0
  76. llama_cloud/types/data_sink_component_one.py +17 -0
  77. llama_cloud/types/data_sink_create.py +36 -0
  78. llama_cloud/types/data_sink_create_component.py +7 -0
  79. llama_cloud/types/data_sink_create_component_one.py +17 -0
  80. llama_cloud/types/data_sink_definition.py +41 -0
  81. llama_cloud/types/data_source.py +44 -0
  82. llama_cloud/types/data_source_component.py +7 -0
  83. llama_cloud/types/data_source_component_one.py +19 -0
  84. llama_cloud/types/data_source_create.py +40 -0
  85. llama_cloud/types/data_source_create_component.py +7 -0
  86. llama_cloud/types/data_source_create_component_one.py +19 -0
  87. llama_cloud/types/data_source_create_custom_metadata_value.py +7 -0
  88. llama_cloud/types/data_source_custom_metadata_value.py +7 -0
  89. llama_cloud/types/data_source_definition.py +41 -0
  90. llama_cloud/types/eval_dataset.py +37 -0
  91. llama_cloud/types/eval_dataset_job_params.py +36 -0
  92. llama_cloud/types/eval_dataset_job_record.py +59 -0
  93. llama_cloud/types/eval_execution_params.py +38 -0
  94. llama_cloud/types/eval_execution_params_override.py +38 -0
  95. llama_cloud/types/eval_llm_model_data.py +33 -0
  96. llama_cloud/types/eval_question.py +39 -0
  97. llama_cloud/types/eval_question_create.py +28 -0
  98. llama_cloud/types/eval_question_result.py +49 -0
  99. llama_cloud/types/file.py +46 -0
  100. llama_cloud/types/file_resource_info_value.py +5 -0
  101. llama_cloud/types/filter_condition.py +21 -0
  102. llama_cloud/types/filter_operator.py +65 -0
  103. llama_cloud/types/gemini_embedding.py +51 -0
  104. llama_cloud/types/html_node_parser.py +44 -0
  105. llama_cloud/types/http_validation_error.py +29 -0
  106. llama_cloud/types/hugging_face_inference_api_embedding.py +68 -0
  107. llama_cloud/types/hugging_face_inference_api_embedding_token.py +5 -0
  108. llama_cloud/types/json_node_parser.py +43 -0
  109. llama_cloud/types/llama_parse_supported_file_extensions.py +161 -0
  110. llama_cloud/types/llm.py +55 -0
  111. llama_cloud/types/local_eval.py +46 -0
  112. llama_cloud/types/local_eval_results.py +37 -0
  113. llama_cloud/types/local_eval_sets.py +30 -0
  114. llama_cloud/types/managed_ingestion_status.py +37 -0
  115. llama_cloud/types/markdown_element_node_parser.py +49 -0
  116. llama_cloud/types/markdown_node_parser.py +43 -0
  117. llama_cloud/types/message_role.py +45 -0
  118. llama_cloud/types/metadata_filter.py +41 -0
  119. llama_cloud/types/metadata_filter_value.py +5 -0
  120. llama_cloud/types/metadata_filters.py +41 -0
  121. llama_cloud/types/metadata_filters_filters_item.py +8 -0
  122. llama_cloud/types/metric_result.py +30 -0
  123. llama_cloud/types/node_parser.py +37 -0
  124. llama_cloud/types/object_type.py +33 -0
  125. llama_cloud/types/open_ai_embedding.py +73 -0
  126. llama_cloud/types/parser_languages.py +361 -0
  127. llama_cloud/types/parsing_history_item.py +36 -0
  128. llama_cloud/types/parsing_job.py +30 -0
  129. llama_cloud/types/parsing_job_json_result.py +29 -0
  130. llama_cloud/types/parsing_job_markdown_result.py +29 -0
  131. llama_cloud/types/parsing_job_text_result.py +29 -0
  132. llama_cloud/types/parsing_usage.py +29 -0
  133. llama_cloud/types/pipeline.py +64 -0
  134. llama_cloud/types/pipeline_create.py +61 -0
  135. llama_cloud/types/pipeline_data_source.py +46 -0
  136. llama_cloud/types/pipeline_data_source_component.py +7 -0
  137. llama_cloud/types/pipeline_data_source_component_one.py +19 -0
  138. llama_cloud/types/pipeline_data_source_create.py +32 -0
  139. llama_cloud/types/pipeline_data_source_custom_metadata_value.py +7 -0
  140. llama_cloud/types/pipeline_deployment.py +38 -0
  141. llama_cloud/types/pipeline_file.py +52 -0
  142. llama_cloud/types/pipeline_file_create.py +36 -0
  143. llama_cloud/types/pipeline_file_create_custom_metadata_value.py +7 -0
  144. llama_cloud/types/pipeline_file_custom_metadata_value.py +7 -0
  145. llama_cloud/types/pipeline_file_resource_info_value.py +7 -0
  146. llama_cloud/types/pipeline_file_status_response.py +35 -0
  147. llama_cloud/types/pipeline_type.py +21 -0
  148. llama_cloud/types/pooling.py +29 -0
  149. llama_cloud/types/preset_retrieval_params.py +40 -0
  150. llama_cloud/types/presigned_url.py +36 -0
  151. llama_cloud/types/project.py +42 -0
  152. llama_cloud/types/project_create.py +32 -0
  153. llama_cloud/types/prompt_mixin_prompts.py +36 -0
  154. llama_cloud/types/prompt_spec.py +35 -0
  155. llama_cloud/types/pydantic_program_mode.py +41 -0
  156. llama_cloud/types/related_node_info.py +37 -0
  157. llama_cloud/types/retrieve_results.py +40 -0
  158. llama_cloud/types/sentence_splitter.py +48 -0
  159. llama_cloud/types/simple_file_node_parser.py +44 -0
  160. llama_cloud/types/status_enum.py +33 -0
  161. llama_cloud/types/supported_eval_llm_model.py +35 -0
  162. llama_cloud/types/supported_eval_llm_model_names.py +29 -0
  163. llama_cloud/types/text_node.py +62 -0
  164. llama_cloud/types/text_node_relationships_value.py +7 -0
  165. llama_cloud/types/text_node_with_score.py +36 -0
  166. llama_cloud/types/token_text_splitter.py +43 -0
  167. llama_cloud/types/transformation_category_names.py +21 -0
  168. llama_cloud/types/validation_error.py +31 -0
  169. llama_cloud/types/validation_error_loc_item.py +5 -0
  170. llama_cloud-0.0.1.dist-info/LICENSE +21 -0
  171. llama_cloud-0.0.1.dist-info/METADATA +25 -0
  172. llama_cloud-0.0.1.dist-info/RECORD +173 -0
  173. llama_cloud-0.0.1.dist-info/WHEEL +4 -0
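
The largest file in the release is the generated pipelines client, reproduced below. For orientation, a minimal usage sketch, assuming the installed wheel exposes the client as llama_cloud.client.PlatformApi (the generated docstrings in this release reference a placeholder module named platform rather than llama_cloud, and the token and base_url values here are placeholders):

    from llama_cloud.client import PlatformApi
    from llama_cloud.core.api_error import ApiError

    # Placeholder credentials and host; substitute real values.
    client = PlatformApi(
        token="YOUR_TOKEN",
        base_url="https://yourhost.com/path/to/api",
    )

    try:
        # Per the hunk below, each method raises UnprocessableEntityError
        # (a subclass of ApiError) on HTTP 422 and ApiError otherwise.
        pipelines = client.pipelines.search_pipelines(project_name="my-project")
    except ApiError as error:
        print(error.status_code, error.body)
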
llama_cloud/resources/pipelines/client.py
@@ -0,0 +1,2599 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+import urllib.parse
+from json.decoder import JSONDecodeError
+
+from ...core.api_error import ApiError
+from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
+from ...core.jsonable_encoder import jsonable_encoder
+from ...core.remove_none_from_dict import remove_none_from_dict
+from ...errors.unprocessable_entity_error import UnprocessableEntityError
+from ...types.cloud_document import CloudDocument
+from ...types.cloud_document_create import CloudDocumentCreate
+from ...types.configured_transformation_item import ConfiguredTransformationItem
+from ...types.data_sink_create import DataSinkCreate
+from ...types.eval_dataset_job_record import EvalDatasetJobRecord
+from ...types.eval_execution_params import EvalExecutionParams
+from ...types.eval_execution_params_override import EvalExecutionParamsOverride
+from ...types.eval_question_result import EvalQuestionResult
+from ...types.http_validation_error import HttpValidationError
+from ...types.metadata_filters import MetadataFilters
+from ...types.pipeline import Pipeline
+from ...types.pipeline_create import PipelineCreate
+from ...types.pipeline_data_source import PipelineDataSource
+from ...types.pipeline_data_source_create import PipelineDataSourceCreate
+from ...types.pipeline_deployment import PipelineDeployment
+from ...types.pipeline_file import PipelineFile
+from ...types.pipeline_file_create import PipelineFileCreate
+from ...types.pipeline_file_status_response import PipelineFileStatusResponse
+from ...types.pipeline_type import PipelineType
+from ...types.preset_retrieval_params import PresetRetrievalParams
+from ...types.retrieve_results import RetrieveResults
+from .types.pipeline_file_update_custom_metadata_value import PipelineFileUpdateCustomMetadataValue
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+# this is used as the default value for optional parameters
+OMIT = typing.cast(typing.Any, ...)
+
+
+class PipelinesClient:
+    def __init__(self, *, client_wrapper: SyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    def search_pipelines(
+        self,
+        *,
+        project_name: str,
+        pipeline_name: typing.Optional[str] = None,
+        pipeline_type: typing.Optional[PipelineType] = None,
+    ) -> typing.List[Pipeline]:
+        """
+        Search for pipelines by various parameters.
+
+        Parameters:
+            - project_name: str.
+
+            - pipeline_name: typing.Optional[str].
+
+            - pipeline_type: typing.Optional[PipelineType].
+        ---
+        from platform import PipelineType
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.search_pipelines(
+            project_name="string",
+            pipeline_type=PipelineType.PLAYGROUND,
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/pipelines"),
+            params=remove_none_from_dict(
+                {"project_name": project_name, "pipeline_name": pipeline_name, "pipeline_type": pipeline_type}
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[Pipeline], _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def create_pipeline(self, *, project_id: typing.Optional[str] = None, request: PipelineCreate) -> Pipeline:
+        """
+        Create a new pipeline for a project.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - request: PipelineCreate.
+        ---
+        from platform import (
+            ConfigurableDataSinkNames,
+            DataSinkCreate,
+            EvalExecutionParams,
+            FilterCondition,
+            MetadataFilters,
+            PipelineCreate,
+            PipelineType,
+            PresetRetrievalParams,
+            SupportedEvalLlmModelNames,
+        )
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.create_pipeline(
+            request=PipelineCreate(
+                data_sink=DataSinkCreate(
+                    name="string",
+                    sink_type=ConfigurableDataSinkNames.CHROMA,
+                ),
+                preset_retrieval_parameters=PresetRetrievalParams(
+                    search_filters=MetadataFilters(
+                        filters=[],
+                        condition=FilterCondition.AND,
+                    ),
+                ),
+                eval_parameters=EvalExecutionParams(
+                    llm_model=SupportedEvalLlmModelNames.GPT_3_5_TURBO,
+                ),
+                name="string",
+                pipeline_type=PipelineType.PLAYGROUND,
+            ),
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/pipelines"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(Pipeline, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def upsert_pipeline(self, *, project_id: typing.Optional[str] = None, request: PipelineCreate) -> Pipeline:
+        """
+        Upsert a pipeline for a project.
+        Updates if a pipeline with the same name and project_id already exists. Otherwise, creates a new pipeline.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - request: PipelineCreate.
+        ---
+        from platform import (
+            ConfigurableDataSinkNames,
+            DataSinkCreate,
+            EvalExecutionParams,
+            FilterCondition,
+            MetadataFilters,
+            PipelineCreate,
+            PipelineType,
+            PresetRetrievalParams,
+            SupportedEvalLlmModelNames,
+        )
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.upsert_pipeline(
+            request=PipelineCreate(
+                data_sink=DataSinkCreate(
+                    name="string",
+                    sink_type=ConfigurableDataSinkNames.CHROMA,
+                ),
+                preset_retrieval_parameters=PresetRetrievalParams(
+                    search_filters=MetadataFilters(
+                        filters=[],
+                        condition=FilterCondition.AND,
+                    ),
+                ),
+                eval_parameters=EvalExecutionParams(
+                    llm_model=SupportedEvalLlmModelNames.GPT_3_5_TURBO,
+                ),
+                name="string",
+                pipeline_type=PipelineType.PLAYGROUND,
+            ),
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/pipelines"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(Pipeline, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def get_pipeline(
+        self, pipeline_id: str, *, with_managed_ingestion_status: typing.Optional[bool] = None
+    ) -> Pipeline:
+        """
+        Get a pipeline by ID for a given project.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - with_managed_ingestion_status: typing.Optional[bool].
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.get_pipeline(
+            pipeline_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}"),
+            params=remove_none_from_dict({"with_managed_ingestion_status": with_managed_ingestion_status}),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(Pipeline, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def update_existing_pipeline(
+        self,
+        pipeline_id: str,
+        *,
+        configured_transformations: typing.Optional[typing.List[ConfiguredTransformationItem]] = OMIT,
+        data_sink_id: typing.Optional[str] = OMIT,
+        data_sink: typing.Optional[DataSinkCreate] = OMIT,
+        preset_retrieval_parameters: typing.Optional[PresetRetrievalParams] = OMIT,
+        eval_parameters: typing.Optional[EvalExecutionParams] = OMIT,
+        llama_parse_enabled: typing.Optional[bool] = OMIT,
+        name: typing.Optional[str] = OMIT,
+        managed_pipeline_id: typing.Optional[str] = OMIT,
+    ) -> Pipeline:
+        """
+        Update an existing pipeline for a project.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - configured_transformations: typing.Optional[typing.List[ConfiguredTransformationItem]].
+
+            - data_sink_id: typing.Optional[str]. Data sink ID. When provided instead of data_sink, the data sink will be looked up by ID.
+
+            - data_sink: typing.Optional[DataSinkCreate]. Data sink. When provided instead of data_sink_id, the data sink will be created.
+
+            - preset_retrieval_parameters: typing.Optional[PresetRetrievalParams]. Preset retrieval parameters for the pipeline.
+
+            - eval_parameters: typing.Optional[EvalExecutionParams]. Eval parameters for the pipeline.
+
+            - llama_parse_enabled: typing.Optional[bool]. Whether to use LlamaParse during pipeline execution.
+
+            - name: typing.Optional[str].
+
+            - managed_pipeline_id: typing.Optional[str]. The ID of the ManagedPipeline this playground pipeline is linked to.
+        ---
+        from platform import (
+            ConfigurableDataSinkNames,
+            DataSinkCreate,
+            EvalExecutionParams,
+            FilterCondition,
+            MetadataFilters,
+            PresetRetrievalParams,
+            SupportedEvalLlmModelNames,
+        )
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.update_existing_pipeline(
+            pipeline_id="string",
+            data_sink=DataSinkCreate(
+                name="string",
+                sink_type=ConfigurableDataSinkNames.CHROMA,
+            ),
+            preset_retrieval_parameters=PresetRetrievalParams(
+                search_filters=MetadataFilters(
+                    filters=[],
+                    condition=FilterCondition.AND,
+                ),
+            ),
+            eval_parameters=EvalExecutionParams(
+                llm_model=SupportedEvalLlmModelNames.GPT_3_5_TURBO,
+            ),
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {}
+        if configured_transformations is not OMIT:
+            _request["configured_transformations"] = configured_transformations
+        if data_sink_id is not OMIT:
+            _request["data_sink_id"] = data_sink_id
+        if data_sink is not OMIT:
+            _request["data_sink"] = data_sink
+        if preset_retrieval_parameters is not OMIT:
+            _request["preset_retrieval_parameters"] = preset_retrieval_parameters
+        if eval_parameters is not OMIT:
+            _request["eval_parameters"] = eval_parameters
+        if llama_parse_enabled is not OMIT:
+            _request["llama_parse_enabled"] = llama_parse_enabled
+        if name is not OMIT:
+            _request["name"] = name
+        if managed_pipeline_id is not OMIT:
+            _request["managed_pipeline_id"] = managed_pipeline_id
+        _response = self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}"),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(Pipeline, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def delete_pipeline(self, pipeline_id: str) -> None:
+        """
+        Delete a pipeline by ID.
+
+        Parameters:
+            - pipeline_id: str.
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.delete_pipeline(
+            pipeline_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "DELETE",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}"),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def sync_pipeline(self, pipeline_id: str) -> Pipeline:
+        """
+        Run ingestion for the pipeline by incrementally updating the data-sink with upstream changes from data-sources & files.
+
+        Parameters:
+            - pipeline_id: str.
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.sync_pipeline(
+            pipeline_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/sync"),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(Pipeline, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def get_eval_dataset_executions(self, pipeline_id: str, eval_dataset_id: str) -> typing.List[EvalDatasetJobRecord]:
+        """
+        Get the status of an EvalDatasetExecution.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - eval_dataset_id: str.
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.get_eval_dataset_executions(
+            pipeline_id="string",
+            eval_dataset_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/",
+                f"api/v1/pipelines/{pipeline_id}/eval-datasets/{eval_dataset_id}/execute",
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[EvalDatasetJobRecord], _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def execute_eval_dataset(
+        self,
+        pipeline_id: str,
+        eval_dataset_id: str,
+        *,
+        eval_question_ids: typing.List[str],
+        params: typing.Optional[EvalExecutionParamsOverride] = OMIT,
+    ) -> EvalDatasetJobRecord:
+        """
+        Execute a dataset.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - eval_dataset_id: str.
+
+            - eval_question_ids: typing.List[str].
+
+            - params: typing.Optional[EvalExecutionParamsOverride]. The parameters for the eval execution that will override the ones set in the pipeline.
+        ---
+        from platform import EvalExecutionParamsOverride, SupportedEvalLlmModelNames
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.execute_eval_dataset(
+            pipeline_id="string",
+            eval_dataset_id="string",
+            eval_question_ids=[],
+            params=EvalExecutionParamsOverride(
+                llm_model=SupportedEvalLlmModelNames.GPT_3_5_TURBO,
+            ),
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"eval_question_ids": eval_question_ids}
+        if params is not OMIT:
+            _request["params"] = params
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/",
+                f"api/v1/pipelines/{pipeline_id}/eval-datasets/{eval_dataset_id}/execute",
+            ),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(EvalDatasetJobRecord, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def get_eval_dataset_execution_result(
+        self, pipeline_id: str, eval_dataset_id: str
+    ) -> typing.List[EvalQuestionResult]:
+        """
+        Get the result of an EvalDatasetExecution.
+        If eval_question_ids is specified, only the results for the specified
+        questions will be returned.
+        If any of the specified questions do not have a result, they will be ignored.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - eval_dataset_id: str.
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.get_eval_dataset_execution_result(
+            pipeline_id="string",
+            eval_dataset_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/",
+                f"api/v1/pipelines/{pipeline_id}/eval-datasets/{eval_dataset_id}/execute/result",
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[EvalQuestionResult], _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def get_eval_dataset_execution(
+        self, pipeline_id: str, eval_dataset_id: str, eval_dataset_execution_id: str
+    ) -> EvalDatasetJobRecord:
+        """
+        Get the status of an EvalDatasetExecution.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - eval_dataset_id: str.
+
+            - eval_dataset_execution_id: str.
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.get_eval_dataset_execution(
+            pipeline_id="string",
+            eval_dataset_id="string",
+            eval_dataset_execution_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/",
+                f"api/v1/pipelines/{pipeline_id}/eval-datasets/{eval_dataset_id}/execute/{eval_dataset_execution_id}",
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(EvalDatasetJobRecord, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def get_files_for_pipeline(self, pipeline_id: str) -> typing.List[PipelineFile]:
+        """
+        Get files for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.get_files_for_pipeline(
+            pipeline_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/files"),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[PipelineFile], _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def add_files_to_pipeline(
+        self, pipeline_id: str, *, request: typing.List[PipelineFileCreate]
+    ) -> typing.List[PipelineFile]:
+        """
+        Add files to a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - request: typing.List[PipelineFileCreate].
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.add_files_to_pipeline(
+            pipeline_id="string",
+            request=[],
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/files"),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[PipelineFile], _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def get_pipeline_file_status(self, pipeline_id: str, file_id: str) -> PipelineFileStatusResponse:
+        """
+        Get status of a file for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - file_id: str.
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.get_pipeline_file_status(
+            pipeline_id="string",
+            file_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/files/{file_id}/status"
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(PipelineFileStatusResponse, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def update_pipeline_file(
+        self,
+        pipeline_id: str,
+        file_id: str,
+        *,
+        custom_metadata: typing.Optional[typing.Dict[str, PipelineFileUpdateCustomMetadataValue]] = OMIT,
+    ) -> PipelineFile:
+        """
+        Update a file for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - file_id: str.
+
+            - custom_metadata: typing.Optional[typing.Dict[str, PipelineFileUpdateCustomMetadataValue]]. Custom metadata for the file
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.update_pipeline_file(
+            pipeline_id="string",
+            file_id="string",
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {}
+        if custom_metadata is not OMIT:
+            _request["custom_metadata"] = custom_metadata
+        _response = self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/files/{file_id}"
+            ),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(PipelineFile, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def delete_pipeline_file(self, pipeline_id: str, file_id: str) -> None:
+        """
+        Delete a file from a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - file_id: str.
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.delete_pipeline_file(
+            pipeline_id="string",
+            file_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "DELETE",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/files/{file_id}"
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def get_pipeline_data_sources(self, pipeline_id: str) -> typing.List[PipelineDataSource]:
+        """
+        Get data sources for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.get_pipeline_data_sources(
+            pipeline_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/data-sources"
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[PipelineDataSource], _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def add_data_sources_to_pipeline(
+        self, pipeline_id: str, *, request: typing.List[PipelineDataSourceCreate]
+    ) -> typing.List[PipelineDataSource]:
+        """
+        Add data sources to a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - request: typing.List[PipelineDataSourceCreate].
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.add_data_sources_to_pipeline(
+            pipeline_id="string",
+            request=[],
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/data-sources"
+            ),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[PipelineDataSource], _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def delete_pipeline_data_source(self, pipeline_id: str, data_source_id: str) -> None:
+        """
+        Delete a data source from a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - data_source_id: str.
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.delete_pipeline_data_source(
+            pipeline_id="string",
+            data_source_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "DELETE",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/",
+                f"api/v1/pipelines/{pipeline_id}/data-sources/{data_source_id}",
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def sync_pipeline_data_source(self, pipeline_id: str, data_source_id: str) -> Pipeline:
+        """
+        Run ingestion for the pipeline data source by incrementally updating the data-sink with upstream changes from data-source.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - data_source_id: str.
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.sync_pipeline_data_source(
+            pipeline_id="string",
+            data_source_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/",
+                f"api/v1/pipelines/{pipeline_id}/data-sources/{data_source_id}/sync",
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(Pipeline, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def run_search(
+        self,
+        pipeline_id: str,
+        *,
+        dense_similarity_top_k: typing.Optional[int] = OMIT,
+        sparse_similarity_top_k: typing.Optional[int] = OMIT,
+        enable_reranking: typing.Optional[bool] = OMIT,
+        rerank_top_n: typing.Optional[int] = OMIT,
+        alpha: typing.Optional[float] = OMIT,
+        search_filters: typing.Optional[MetadataFilters] = OMIT,
+        query: str,
+    ) -> RetrieveResults:
+        """
+        Get retrieval results for a managed pipeline and a query
+
+        Parameters:
+            - pipeline_id: str.
+
+            - dense_similarity_top_k: typing.Optional[int]. Number of nodes for dense retrieval.
+
+            - sparse_similarity_top_k: typing.Optional[int]. Number of nodes for sparse retrieval.
+
+            - enable_reranking: typing.Optional[bool]. Enable reranking for retrieval
+
+            - rerank_top_n: typing.Optional[int]. Number of reranked nodes for returning.
+
+            - alpha: typing.Optional[float]. Alpha value for hybrid retrieval to determine the weights between dense and sparse retrieval. 0 is sparse retrieval and 1 is dense retrieval.
+
+            - search_filters: typing.Optional[MetadataFilters]. Search filters for retrieval.
+
+            - query: str. The query to retrieve against.
+        ---
+        from platform import FilterCondition, MetadataFilters
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.run_search(
+            pipeline_id="string",
+            search_filters=MetadataFilters(
+                filters=[],
+                condition=FilterCondition.AND,
+            ),
+            query="string",
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"query": query}
+        if dense_similarity_top_k is not OMIT:
+            _request["dense_similarity_top_k"] = dense_similarity_top_k
+        if sparse_similarity_top_k is not OMIT:
+            _request["sparse_similarity_top_k"] = sparse_similarity_top_k
+        if enable_reranking is not OMIT:
+            _request["enable_reranking"] = enable_reranking
+        if rerank_top_n is not OMIT:
+            _request["rerank_top_n"] = rerank_top_n
+        if alpha is not OMIT:
+            _request["alpha"] = alpha
+        if search_filters is not OMIT:
+            _request["search_filters"] = search_filters
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/retrieve"),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(RetrieveResults, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def get_pipeline_jobs(self, pipeline_id: str) -> typing.List[PipelineDeployment]:
+        """
+        Get jobs for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.get_pipeline_jobs(
+            pipeline_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/jobs"),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[PipelineDeployment], _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def get_pipeline_job(self, pipeline_id: str, job_id: str) -> PipelineDeployment:
+        """
+        Get a job for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - job_id: str.
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.get_pipeline_job(
+            pipeline_id="string",
+            job_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/jobs/{job_id}"
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(PipelineDeployment, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def list_pipeline_documents(
+        self, pipeline_id: str, *, skip: typing.Optional[int] = None, limit: typing.Optional[int] = None
+    ) -> typing.List[CloudDocument]:
+        """
+        Return a list of documents for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - skip: typing.Optional[int].
+
+            - limit: typing.Optional[int].
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.list_pipeline_documents(
+            pipeline_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/documents"
+            ),
+            params=remove_none_from_dict({"skip": skip, "limit": limit}),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[CloudDocument], _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def create_pipeline_document(self, pipeline_id: str, *, request: CloudDocumentCreate) -> CloudDocument:
+        """
+        Create a new document for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - request: CloudDocumentCreate.
+        ---
+        from platform import CloudDocumentCreate
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.create_pipeline_document(
+            pipeline_id="string",
+            request=CloudDocumentCreate(
+                text="string",
+                metadata={"string": {}},
+            ),
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/documents"
+            ),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(CloudDocument, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def upsert_pipeline_document(self, pipeline_id: str, *, request: CloudDocumentCreate) -> CloudDocument:
+        """
+        Create or update a document for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - request: CloudDocumentCreate.
+        ---
+        from platform import CloudDocumentCreate
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.upsert_pipeline_document(
+            pipeline_id="string",
+            request=CloudDocumentCreate(
+                text="string",
+                metadata={"string": {}},
+            ),
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/documents"
+            ),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(CloudDocument, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def get_pipeline_document(self, pipeline_id: str, document_id: str) -> CloudDocument:
+        """
+        Return a single document for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - document_id: str.
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.pipelines.get_pipeline_document(
+            pipeline_id="string",
+            document_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/documents/{document_id}"
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(CloudDocument, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
1283
+ def delete_pipeline_document(self, pipeline_id: str, document_id: str) -> None:
1284
+ """
1285
+ Delete a document for a pipeline.
1286
+
1287
+ Parameters:
1288
+ - pipeline_id: str.
1289
+
1290
+ - document_id: str.
1291
+ ---
1292
+ from platform.client import PlatformApi
1293
+
1294
+ client = PlatformApi(
1295
+ token="YOUR_TOKEN",
1296
+ base_url="https://yourhost.com/path/to/api",
1297
+ )
1298
+ client.pipelines.delete_pipeline_document(
1299
+ pipeline_id="string",
1300
+ document_id="string",
1301
+ )
1302
+ """
1303
+ _response = self._client_wrapper.httpx_client.request(
1304
+ "DELETE",
1305
+ urllib.parse.urljoin(
1306
+ f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/documents/{document_id}"
1307
+ ),
1308
+ headers=self._client_wrapper.get_headers(),
1309
+ timeout=60,
1310
+ )
1311
+ if 200 <= _response.status_code < 300:
1312
+ return
1313
+ if _response.status_code == 422:
1314
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
1315
+ try:
1316
+ _response_json = _response.json()
1317
+ except JSONDecodeError:
1318
+ raise ApiError(status_code=_response.status_code, body=_response.text)
1319
+ raise ApiError(status_code=_response.status_code, body=_response_json)
1320
+
1321
+
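The four document methods above form a complete CRUD surface for a pipeline's documents. A minimal end-to-end sketch with the synchronous client, following the import convention used in the docstrings (the token, base URL, and pipeline ID are placeholders, and reading the returned document's identifier as `.id` is an assumption, since the CloudDocument model is not shown in this section):

from platform import CloudDocumentCreate
from platform.client import PlatformApi

client = PlatformApi(token="YOUR_TOKEN", base_url="https://yourhost.com/path/to/api")

# PUT api/v1/pipelines/{pipeline_id}/documents is an upsert, so repeated
# runs of this script are safe: the second run updates the same document.
doc = client.pipelines.upsert_pipeline_document(
    pipeline_id="YOUR_PIPELINE_ID",
    request=CloudDocumentCreate(text="hello world", metadata={}),
)

fetched = client.pipelines.get_pipeline_document(
    pipeline_id="YOUR_PIPELINE_ID", document_id=doc.id  # assumed `id` attribute
)
client.pipelines.delete_pipeline_document(
    pipeline_id="YOUR_PIPELINE_ID", document_id=doc.id
)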
1322
+class AsyncPipelinesClient:
+    def __init__(self, *, client_wrapper: AsyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
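AsyncPipelinesClient mirrors PipelinesClient method for method; the differences are the AsyncClientWrapper and that every call must be awaited. A minimal sketch of driving it from a script (the token, base URL, and project name are placeholders):

import asyncio

from platform.client import AsyncPlatformApi


async def main() -> None:
    client = AsyncPlatformApi(token="YOUR_TOKEN", base_url="https://yourhost.com/path/to/api")
    # Any of the pipeline calls below can be awaited here.
    pipelines = await client.pipelines.search_pipelines(project_name="my-project")
    print(pipelines)


asyncio.run(main())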
1326
+    async def search_pipelines(
+        self,
+        *,
+        project_name: str,
+        pipeline_name: typing.Optional[str] = None,
+        pipeline_type: typing.Optional[PipelineType] = None,
+    ) -> typing.List[Pipeline]:
+        """
+        Search for pipelines by various parameters.
+
+        Parameters:
+            - project_name: str.
+
+            - pipeline_name: typing.Optional[str].
+
+            - pipeline_type: typing.Optional[PipelineType].
+        ---
+        from platform import PipelineType
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.search_pipelines(
+            project_name="string",
+            pipeline_type=PipelineType.PLAYGROUND,
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/pipelines"),
+            params=remove_none_from_dict(
+                {"project_name": project_name, "pipeline_name": pipeline_name, "pipeline_type": pipeline_type}
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[Pipeline], _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def create_pipeline(self, *, project_id: typing.Optional[str] = None, request: PipelineCreate) -> Pipeline:
+        """
+        Create a new pipeline for a project.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - request: PipelineCreate.
+        ---
+        from platform import (
+            ConfigurableDataSinkNames,
+            DataSinkCreate,
+            EvalExecutionParams,
+            FilterCondition,
+            MetadataFilters,
+            PipelineCreate,
+            PipelineType,
+            PresetRetrievalParams,
+            SupportedEvalLlmModelNames,
+        )
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.create_pipeline(
+            request=PipelineCreate(
+                data_sink=DataSinkCreate(
+                    name="string",
+                    sink_type=ConfigurableDataSinkNames.CHROMA,
+                ),
+                preset_retrieval_parameters=PresetRetrievalParams(
+                    search_filters=MetadataFilters(
+                        filters=[],
+                        condition=FilterCondition.AND,
+                    ),
+                ),
+                eval_parameters=EvalExecutionParams(
+                    llm_model=SupportedEvalLlmModelNames.GPT_3_5_TURBO,
+                ),
+                name="string",
+                pipeline_type=PipelineType.PLAYGROUND,
+            ),
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/pipelines"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(Pipeline, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def upsert_pipeline(self, *, project_id: typing.Optional[str] = None, request: PipelineCreate) -> Pipeline:
+        """
+        Upsert a pipeline for a project.
+        Updates if a pipeline with the same name and project_id already exists. Otherwise, creates a new pipeline.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - request: PipelineCreate.
+        ---
+        from platform import (
+            ConfigurableDataSinkNames,
+            DataSinkCreate,
+            EvalExecutionParams,
+            FilterCondition,
+            MetadataFilters,
+            PipelineCreate,
+            PipelineType,
+            PresetRetrievalParams,
+            SupportedEvalLlmModelNames,
+        )
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.upsert_pipeline(
+            request=PipelineCreate(
+                data_sink=DataSinkCreate(
+                    name="string",
+                    sink_type=ConfigurableDataSinkNames.CHROMA,
+                ),
+                preset_retrieval_parameters=PresetRetrievalParams(
+                    search_filters=MetadataFilters(
+                        filters=[],
+                        condition=FilterCondition.AND,
+                    ),
+                ),
+                eval_parameters=EvalExecutionParams(
+                    llm_model=SupportedEvalLlmModelNames.GPT_3_5_TURBO,
+                ),
+                name="string",
+                pipeline_type=PipelineType.PLAYGROUND,
+            ),
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/pipelines"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(Pipeline, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
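create_pipeline always POSTs a new pipeline, while upsert_pipeline PUTs and updates in place when a pipeline with the same name and project_id already exists, which makes it the natural choice for idempotent deployment scripts. A minimal sketch, assuming PipelineCreate's sink and parameter fields are optional so only name and pipeline_type are passed (the name is a placeholder):

from platform import PipelineCreate, PipelineType
from platform.client import AsyncPlatformApi


async def deploy(client: AsyncPlatformApi) -> None:
    request = PipelineCreate(name="nightly-ingest", pipeline_type=PipelineType.PLAYGROUND)
    # Running this twice leaves exactly one pipeline: the second call
    # finds the existing name/project_id pair and updates it.
    pipeline = await client.pipelines.upsert_pipeline(request=request)
    print(pipeline)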
1503
+    async def get_pipeline(
+        self, pipeline_id: str, *, with_managed_ingestion_status: typing.Optional[bool] = None
+    ) -> Pipeline:
+        """
+        Get a pipeline by ID for a given project.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - with_managed_ingestion_status: typing.Optional[bool].
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.get_pipeline(
+            pipeline_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}"),
+            params=remove_none_from_dict({"with_managed_ingestion_status": with_managed_ingestion_status}),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(Pipeline, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def update_existing_pipeline(
+        self,
+        pipeline_id: str,
+        *,
+        configured_transformations: typing.Optional[typing.List[ConfiguredTransformationItem]] = OMIT,
+        data_sink_id: typing.Optional[str] = OMIT,
+        data_sink: typing.Optional[DataSinkCreate] = OMIT,
+        preset_retrieval_parameters: typing.Optional[PresetRetrievalParams] = OMIT,
+        eval_parameters: typing.Optional[EvalExecutionParams] = OMIT,
+        llama_parse_enabled: typing.Optional[bool] = OMIT,
+        name: typing.Optional[str] = OMIT,
+        managed_pipeline_id: typing.Optional[str] = OMIT,
+    ) -> Pipeline:
+        """
+        Update an existing pipeline for a project.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - configured_transformations: typing.Optional[typing.List[ConfiguredTransformationItem]].
+
+            - data_sink_id: typing.Optional[str]. Data sink ID. When provided instead of data_sink, the data sink will be looked up by ID.
+
+            - data_sink: typing.Optional[DataSinkCreate]. Data sink. When provided instead of data_sink_id, the data sink will be created.
+
+            - preset_retrieval_parameters: typing.Optional[PresetRetrievalParams]. Preset retrieval parameters for the pipeline.
+
+            - eval_parameters: typing.Optional[EvalExecutionParams]. Eval parameters for the pipeline.
+
+            - llama_parse_enabled: typing.Optional[bool]. Whether to use LlamaParse during pipeline execution.
+
+            - name: typing.Optional[str].
+
+            - managed_pipeline_id: typing.Optional[str]. The ID of the ManagedPipeline this playground pipeline is linked to.
+        ---
+        from platform import (
+            ConfigurableDataSinkNames,
+            DataSinkCreate,
+            EvalExecutionParams,
+            FilterCondition,
+            MetadataFilters,
+            PresetRetrievalParams,
+            SupportedEvalLlmModelNames,
+        )
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.update_existing_pipeline(
+            pipeline_id="string",
+            data_sink=DataSinkCreate(
+                name="string",
+                sink_type=ConfigurableDataSinkNames.CHROMA,
+            ),
+            preset_retrieval_parameters=PresetRetrievalParams(
+                search_filters=MetadataFilters(
+                    filters=[],
+                    condition=FilterCondition.AND,
+                ),
+            ),
+            eval_parameters=EvalExecutionParams(
+                llm_model=SupportedEvalLlmModelNames.GPT_3_5_TURBO,
+            ),
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {}
+        if configured_transformations is not OMIT:
+            _request["configured_transformations"] = configured_transformations
+        if data_sink_id is not OMIT:
+            _request["data_sink_id"] = data_sink_id
+        if data_sink is not OMIT:
+            _request["data_sink"] = data_sink
+        if preset_retrieval_parameters is not OMIT:
+            _request["preset_retrieval_parameters"] = preset_retrieval_parameters
+        if eval_parameters is not OMIT:
+            _request["eval_parameters"] = eval_parameters
+        if llama_parse_enabled is not OMIT:
+            _request["llama_parse_enabled"] = llama_parse_enabled
+        if name is not OMIT:
+            _request["name"] = name
+        if managed_pipeline_id is not OMIT:
+            _request["managed_pipeline_id"] = managed_pipeline_id
+        _response = await self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}"),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(Pipeline, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
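Because every optional argument defaults to the OMIT sentinel, update_existing_pipeline serializes only the fields you actually pass, so unrelated settings keep their current values. A minimal rename-only sketch (the pipeline ID is a placeholder):

from platform.client import AsyncPlatformApi


async def rename(client: AsyncPlatformApi, pipeline_id: str) -> None:
    # Only {"name": ...} ends up in the PUT body; the data sink,
    # transformations, and eval parameters are left untouched.
    await client.pipelines.update_existing_pipeline(pipeline_id=pipeline_id, name="renamed-pipeline")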
1642
+    async def delete_pipeline(self, pipeline_id: str) -> None:
+        """
+        Delete a pipeline by ID.
+
+        Parameters:
+            - pipeline_id: str.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.delete_pipeline(
+            pipeline_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "DELETE",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}"),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def sync_pipeline(self, pipeline_id: str) -> Pipeline:
+        """
+        Run ingestion for the pipeline by incrementally updating the data sink with upstream changes from its data sources and files.
+
+        Parameters:
+            - pipeline_id: str.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.sync_pipeline(
+            pipeline_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/sync"),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(Pipeline, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
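sync_pipeline only kicks off ingestion; to watch progress you can poll the jobs endpoint defined further down. A rough polling sketch, with the interval and retry count chosen arbitrarily (how completion is represented on PipelineDeployment is not shown in this section, so the loop just prints the records):

import asyncio

from platform.client import AsyncPlatformApi


async def sync_and_watch(client: AsyncPlatformApi, pipeline_id: str) -> None:
    await client.pipelines.sync_pipeline(pipeline_id=pipeline_id)
    for _ in range(10):
        jobs = await client.pipelines.get_pipeline_jobs(pipeline_id=pipeline_id)
        print(jobs)  # inspect the PipelineDeployment records for a terminal state
        await asyncio.sleep(30)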
1708
+    async def get_eval_dataset_executions(
+        self, pipeline_id: str, eval_dataset_id: str
+    ) -> typing.List[EvalDatasetJobRecord]:
+        """
+        Get the status of an EvalDatasetExecution.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - eval_dataset_id: str.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.get_eval_dataset_executions(
+            pipeline_id="string",
+            eval_dataset_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/",
+                f"api/v1/pipelines/{pipeline_id}/eval-datasets/{eval_dataset_id}/execute",
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[EvalDatasetJobRecord], _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def execute_eval_dataset(
+        self,
+        pipeline_id: str,
+        eval_dataset_id: str,
+        *,
+        eval_question_ids: typing.List[str],
+        params: typing.Optional[EvalExecutionParamsOverride] = OMIT,
+    ) -> EvalDatasetJobRecord:
+        """
+        Execute a dataset.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - eval_dataset_id: str.
+
+            - eval_question_ids: typing.List[str].
+
+            - params: typing.Optional[EvalExecutionParamsOverride]. The parameters for the eval execution that will override the ones set in the pipeline.
+        ---
+        from platform import EvalExecutionParamsOverride, SupportedEvalLlmModelNames
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.execute_eval_dataset(
+            pipeline_id="string",
+            eval_dataset_id="string",
+            eval_question_ids=[],
+            params=EvalExecutionParamsOverride(
+                llm_model=SupportedEvalLlmModelNames.GPT_3_5_TURBO,
+            ),
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"eval_question_ids": eval_question_ids}
+        if params is not OMIT:
+            _request["params"] = params
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/",
+                f"api/v1/pipelines/{pipeline_id}/eval-datasets/{eval_dataset_id}/execute",
+            ),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(EvalDatasetJobRecord, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
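execute_eval_dataset returns a job record immediately; the per-question scores arrive later via the .../execute/result endpoint below. A minimal sketch chaining the two calls (IDs are placeholders; a real script would wait for the execution to finish before fetching results):

from platform.client import AsyncPlatformApi


async def run_eval(client: AsyncPlatformApi, pipeline_id: str, eval_dataset_id: str) -> None:
    # An empty eval_question_ids list mirrors the docstring example above.
    job = await client.pipelines.execute_eval_dataset(
        pipeline_id=pipeline_id, eval_dataset_id=eval_dataset_id, eval_question_ids=[]
    )
    print(job)
    results = await client.pipelines.get_eval_dataset_execution_result(
        pipeline_id=pipeline_id, eval_dataset_id=eval_dataset_id
    )
    print(results)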
1808
+    async def get_eval_dataset_execution_result(
+        self, pipeline_id: str, eval_dataset_id: str
+    ) -> typing.List[EvalQuestionResult]:
+        """
+        Get the result of an EvalDatasetExecution.
+        The underlying endpoint accepts an optional eval_question_ids filter
+        (not exposed by this client method): when specified, only results for
+        the specified questions are returned, and questions without a result
+        are ignored.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - eval_dataset_id: str.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.get_eval_dataset_execution_result(
+            pipeline_id="string",
+            eval_dataset_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/",
+                f"api/v1/pipelines/{pipeline_id}/eval-datasets/{eval_dataset_id}/execute/result",
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[EvalQuestionResult], _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
1852
+    async def get_eval_dataset_execution(
+        self, pipeline_id: str, eval_dataset_id: str, eval_dataset_execution_id: str
+    ) -> EvalDatasetJobRecord:
+        """
+        Get the status of an EvalDatasetExecution.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - eval_dataset_id: str.
+
+            - eval_dataset_execution_id: str.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.get_eval_dataset_execution(
+            pipeline_id="string",
+            eval_dataset_id="string",
+            eval_dataset_execution_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/",
+                f"api/v1/pipelines/{pipeline_id}/eval-datasets/{eval_dataset_id}/execute/{eval_dataset_execution_id}",
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(EvalDatasetJobRecord, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
1896
+    async def get_files_for_pipeline(self, pipeline_id: str) -> typing.List[PipelineFile]:
+        """
+        Get files for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.get_files_for_pipeline(
+            pipeline_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/files"),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[PipelineFile], _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def add_files_to_pipeline(
+        self, pipeline_id: str, *, request: typing.List[PipelineFileCreate]
+    ) -> typing.List[PipelineFile]:
+        """
+        Add files to a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - request: typing.List[PipelineFileCreate].
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.add_files_to_pipeline(
+            pipeline_id="string",
+            request=[],
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/files"),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[PipelineFile], _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def get_pipeline_file_status(self, pipeline_id: str, file_id: str) -> PipelineFileStatusResponse:
+        """
+        Get status of a file for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - file_id: str.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.get_pipeline_file_status(
+            pipeline_id="string",
+            file_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/files/{file_id}/status"
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(PipelineFileStatusResponse, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
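get_pipeline_file_status is the natural polling target after add_files_to_pipeline. A rough sketch; PipelineFileStatusResponse's fields are not shown in this section, so the loop only prints the response for inspection, and the interval and retry count are arbitrary (IDs are placeholders):

import asyncio

from platform.client import AsyncPlatformApi


async def wait_for_file(client: AsyncPlatformApi, pipeline_id: str, file_id: str) -> None:
    for _ in range(30):
        status = await client.pipelines.get_pipeline_file_status(pipeline_id=pipeline_id, file_id=file_id)
        print(status)  # check the response model for a terminal state before breaking
        await asyncio.sleep(10)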
2006
+    async def update_pipeline_file(
+        self,
+        pipeline_id: str,
+        file_id: str,
+        *,
+        custom_metadata: typing.Optional[typing.Dict[str, PipelineFileUpdateCustomMetadataValue]] = OMIT,
+    ) -> PipelineFile:
+        """
+        Update a file for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - file_id: str.
+
+            - custom_metadata: typing.Optional[typing.Dict[str, PipelineFileUpdateCustomMetadataValue]]. Custom metadata for the file.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.update_pipeline_file(
+            pipeline_id="string",
+            file_id="string",
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {}
+        if custom_metadata is not OMIT:
+            _request["custom_metadata"] = custom_metadata
+        _response = await self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/files/{file_id}"
+            ),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(PipelineFile, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def delete_pipeline_file(self, pipeline_id: str, file_id: str) -> None:
+        """
+        Delete a file from a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - file_id: str.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.delete_pipeline_file(
+            pipeline_id="string",
+            file_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "DELETE",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/files/{file_id}"
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
2094
+    async def get_pipeline_data_sources(self, pipeline_id: str) -> typing.List[PipelineDataSource]:
+        """
+        Get data sources for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.get_pipeline_data_sources(
+            pipeline_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/data-sources"
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[PipelineDataSource], _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def add_data_sources_to_pipeline(
+        self, pipeline_id: str, *, request: typing.List[PipelineDataSourceCreate]
+    ) -> typing.List[PipelineDataSource]:
+        """
+        Add data sources to a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - request: typing.List[PipelineDataSourceCreate].
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.add_data_sources_to_pipeline(
+            pipeline_id="string",
+            request=[],
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/data-sources"
+            ),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[PipelineDataSource], _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def delete_pipeline_data_source(self, pipeline_id: str, data_source_id: str) -> None:
+        """
+        Delete a data source from a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - data_source_id: str.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.delete_pipeline_data_source(
+            pipeline_id="string",
+            data_source_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "DELETE",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/",
+                f"api/v1/pipelines/{pipeline_id}/data-sources/{data_source_id}",
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
2209
+    async def sync_pipeline_data_source(self, pipeline_id: str, data_source_id: str) -> Pipeline:
+        """
+        Run ingestion for the pipeline data source by incrementally updating the data sink with upstream changes from that data source.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - data_source_id: str.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.sync_pipeline_data_source(
+            pipeline_id="string",
+            data_source_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/",
+                f"api/v1/pipelines/{pipeline_id}/data-sources/{data_source_id}/sync",
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(Pipeline, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
2248
+    async def run_search(
+        self,
+        pipeline_id: str,
+        *,
+        dense_similarity_top_k: typing.Optional[int] = OMIT,
+        sparse_similarity_top_k: typing.Optional[int] = OMIT,
+        enable_reranking: typing.Optional[bool] = OMIT,
+        rerank_top_n: typing.Optional[int] = OMIT,
+        alpha: typing.Optional[float] = OMIT,
+        search_filters: typing.Optional[MetadataFilters] = OMIT,
+        query: str,
+    ) -> RetrieveResults:
+        """
+        Get retrieval results for a managed pipeline and a query.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - dense_similarity_top_k: typing.Optional[int]. Number of nodes for dense retrieval.
+
+            - sparse_similarity_top_k: typing.Optional[int]. Number of nodes for sparse retrieval.
+
+            - enable_reranking: typing.Optional[bool]. Enable reranking for retrieval.
+
+            - rerank_top_n: typing.Optional[int]. Number of reranked nodes to return.
+
+            - alpha: typing.Optional[float]. Alpha value for hybrid retrieval to determine the weights between dense and sparse retrieval. 0 is sparse retrieval and 1 is dense retrieval.
+
+            - search_filters: typing.Optional[MetadataFilters]. Search filters for retrieval.
+
+            - query: str. The query to retrieve against.
+        ---
+        from platform import FilterCondition, MetadataFilters
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.run_search(
+            pipeline_id="string",
+            search_filters=MetadataFilters(
+                filters=[],
+                condition=FilterCondition.AND,
+            ),
+            query="string",
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"query": query}
+        if dense_similarity_top_k is not OMIT:
+            _request["dense_similarity_top_k"] = dense_similarity_top_k
+        if sparse_similarity_top_k is not OMIT:
+            _request["sparse_similarity_top_k"] = sparse_similarity_top_k
+        if enable_reranking is not OMIT:
+            _request["enable_reranking"] = enable_reranking
+        if rerank_top_n is not OMIT:
+            _request["rerank_top_n"] = rerank_top_n
+        if alpha is not OMIT:
+            _request["alpha"] = alpha
+        if search_filters is not OMIT:
+            _request["search_filters"] = search_filters
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/retrieve"),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(RetrieveResults, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
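run_search exposes the hybrid-retrieval knobs directly: alpha weights sparse (0.0) against dense (1.0) retrieval, and rerank_top_n only matters when enable_reranking is set. A minimal sketch (the pipeline ID and query are placeholders):

from platform.client import AsyncPlatformApi


async def search(client: AsyncPlatformApi, pipeline_id: str) -> None:
    results = await client.pipelines.run_search(
        pipeline_id=pipeline_id,
        query="What changed in the latest release?",
        dense_similarity_top_k=10,
        sparse_similarity_top_k=10,
        alpha=0.5,  # equal weighting of dense and sparse retrieval
        enable_reranking=True,
        rerank_top_n=3,  # keep the three best nodes after reranking
    )
    print(results)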
2326
+    async def get_pipeline_jobs(self, pipeline_id: str) -> typing.List[PipelineDeployment]:
+        """
+        Get jobs for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.get_pipeline_jobs(
+            pipeline_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/jobs"),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[PipelineDeployment], _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def get_pipeline_job(self, pipeline_id: str, job_id: str) -> PipelineDeployment:
+        """
+        Get a job for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - job_id: str.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.get_pipeline_job(
+            pipeline_id="string",
+            job_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/jobs/{job_id}"
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(PipelineDeployment, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
2397
+    async def list_pipeline_documents(
+        self, pipeline_id: str, *, skip: typing.Optional[int] = None, limit: typing.Optional[int] = None
+    ) -> typing.List[CloudDocument]:
+        """
+        Return a list of documents for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - skip: typing.Optional[int].
+
+            - limit: typing.Optional[int].
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.list_pipeline_documents(
+            pipeline_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/documents"
+            ),
+            params=remove_none_from_dict({"skip": skip, "limit": limit}),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[CloudDocument], _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
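skip and limit make list_pipeline_documents pageable. A minimal sketch that walks every document in pages of 100, stopping on the first short page (the pipeline ID is a placeholder):

from platform.client import AsyncPlatformApi


async def iter_documents(client: AsyncPlatformApi, pipeline_id: str) -> None:
    skip, limit = 0, 100
    while True:
        page = await client.pipelines.list_pipeline_documents(pipeline_id=pipeline_id, skip=skip, limit=limit)
        for document in page:
            print(document)
        if len(page) < limit:  # a short page means we've reached the end
            break
        skip += limit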
2439
+    async def create_pipeline_document(self, pipeline_id: str, *, request: CloudDocumentCreate) -> CloudDocument:
+        """
+        Create a new document for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - request: CloudDocumentCreate.
+        ---
+        from platform import CloudDocumentCreate
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.create_pipeline_document(
+            pipeline_id="string",
+            request=CloudDocumentCreate(
+                text="string",
+                metadata={"string": {}},
+            ),
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/documents"
+            ),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(CloudDocument, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def upsert_pipeline_document(self, pipeline_id: str, *, request: CloudDocumentCreate) -> CloudDocument:
+        """
+        Create or update a document for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - request: CloudDocumentCreate.
+        ---
+        from platform import CloudDocumentCreate
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.upsert_pipeline_document(
+            pipeline_id="string",
+            request=CloudDocumentCreate(
+                text="string",
+                metadata={"string": {}},
+            ),
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/documents"
+            ),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(CloudDocument, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def get_pipeline_document(self, pipeline_id: str, document_id: str) -> CloudDocument:
+        """
+        Return a single document for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - document_id: str.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.get_pipeline_document(
+            pipeline_id="string",
+            document_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/documents/{document_id}"
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(CloudDocument, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def delete_pipeline_document(self, pipeline_id: str, document_id: str) -> None:
+        """
+        Delete a document for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - document_id: str.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.pipelines.delete_pipeline_document(
+            pipeline_id="string",
+            document_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "DELETE",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/", f"api/v1/pipelines/{pipeline_id}/documents/{document_id}"
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)