llama_cloud-0.0.1-py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects changes between package versions as they appear in their public registries.

Potentially problematic release: this version of llama-cloud might be problematic.

Files changed (173)
  1. llama_cloud/__init__.py +295 -0
  2. llama_cloud/client.py +72 -0
  3. llama_cloud/core/__init__.py +17 -0
  4. llama_cloud/core/api_error.py +15 -0
  5. llama_cloud/core/client_wrapper.py +51 -0
  6. llama_cloud/core/datetime_utils.py +28 -0
  7. llama_cloud/core/jsonable_encoder.py +103 -0
  8. llama_cloud/core/remove_none_from_dict.py +11 -0
  9. llama_cloud/errors/__init__.py +5 -0
  10. llama_cloud/errors/unprocessable_entity_error.py +9 -0
  11. llama_cloud/resources/__init__.py +40 -0
  12. llama_cloud/resources/api_keys/__init__.py +2 -0
  13. llama_cloud/resources/api_keys/client.py +302 -0
  14. llama_cloud/resources/billing/__init__.py +2 -0
  15. llama_cloud/resources/billing/client.py +234 -0
  16. llama_cloud/resources/component_definitions/__init__.py +2 -0
  17. llama_cloud/resources/component_definitions/client.py +192 -0
  18. llama_cloud/resources/data_sinks/__init__.py +5 -0
  19. llama_cloud/resources/data_sinks/client.py +506 -0
  20. llama_cloud/resources/data_sinks/types/__init__.py +6 -0
  21. llama_cloud/resources/data_sinks/types/data_sink_update_component.py +7 -0
  22. llama_cloud/resources/data_sinks/types/data_sink_update_component_one.py +17 -0
  23. llama_cloud/resources/data_sources/__init__.py +5 -0
  24. llama_cloud/resources/data_sources/client.py +521 -0
  25. llama_cloud/resources/data_sources/types/__init__.py +7 -0
  26. llama_cloud/resources/data_sources/types/data_source_update_component.py +7 -0
  27. llama_cloud/resources/data_sources/types/data_source_update_component_one.py +19 -0
  28. llama_cloud/resources/data_sources/types/data_source_update_custom_metadata_value.py +7 -0
  29. llama_cloud/resources/deprecated/__init__.py +2 -0
  30. llama_cloud/resources/deprecated/client.py +982 -0
  31. llama_cloud/resources/evals/__init__.py +2 -0
  32. llama_cloud/resources/evals/client.py +745 -0
  33. llama_cloud/resources/files/__init__.py +5 -0
  34. llama_cloud/resources/files/client.py +560 -0
  35. llama_cloud/resources/files/types/__init__.py +5 -0
  36. llama_cloud/resources/files/types/file_create_resource_info_value.py +5 -0
  37. llama_cloud/resources/parsing/__init__.py +2 -0
  38. llama_cloud/resources/parsing/client.py +982 -0
  39. llama_cloud/resources/pipelines/__init__.py +5 -0
  40. llama_cloud/resources/pipelines/client.py +2599 -0
  41. llama_cloud/resources/pipelines/types/__init__.py +5 -0
  42. llama_cloud/resources/pipelines/types/pipeline_file_update_custom_metadata_value.py +7 -0
  43. llama_cloud/resources/projects/__init__.py +2 -0
  44. llama_cloud/resources/projects/client.py +1231 -0
  45. llama_cloud/types/__init__.py +253 -0
  46. llama_cloud/types/api_key.py +37 -0
  47. llama_cloud/types/azure_open_ai_embedding.py +75 -0
  48. llama_cloud/types/base.py +26 -0
  49. llama_cloud/types/base_prompt_template.py +44 -0
  50. llama_cloud/types/bedrock_embedding.py +56 -0
  51. llama_cloud/types/chat_message.py +35 -0
  52. llama_cloud/types/cloud_az_storage_blob_data_source.py +40 -0
  53. llama_cloud/types/cloud_chroma_vector_store.py +40 -0
  54. llama_cloud/types/cloud_document.py +36 -0
  55. llama_cloud/types/cloud_document_create.py +36 -0
  56. llama_cloud/types/cloud_gcs_data_source.py +37 -0
  57. llama_cloud/types/cloud_google_drive_data_source.py +36 -0
  58. llama_cloud/types/cloud_one_drive_data_source.py +38 -0
  59. llama_cloud/types/cloud_pinecone_vector_store.py +46 -0
  60. llama_cloud/types/cloud_postgres_vector_store.py +44 -0
  61. llama_cloud/types/cloud_qdrant_vector_store.py +48 -0
  62. llama_cloud/types/cloud_s_3_data_source.py +42 -0
  63. llama_cloud/types/cloud_sharepoint_data_source.py +38 -0
  64. llama_cloud/types/cloud_weaviate_vector_store.py +38 -0
  65. llama_cloud/types/code_splitter.py +46 -0
  66. llama_cloud/types/cohere_embedding.py +46 -0
  67. llama_cloud/types/configurable_data_sink_names.py +37 -0
  68. llama_cloud/types/configurable_data_source_names.py +41 -0
  69. llama_cloud/types/configurable_transformation_definition.py +45 -0
  70. llama_cloud/types/configurable_transformation_names.py +73 -0
  71. llama_cloud/types/configured_transformation_item.py +43 -0
  72. llama_cloud/types/configured_transformation_item_component.py +9 -0
  73. llama_cloud/types/configured_transformation_item_component_one.py +35 -0
  74. llama_cloud/types/data_sink.py +40 -0
  75. llama_cloud/types/data_sink_component.py +7 -0
  76. llama_cloud/types/data_sink_component_one.py +17 -0
  77. llama_cloud/types/data_sink_create.py +36 -0
  78. llama_cloud/types/data_sink_create_component.py +7 -0
  79. llama_cloud/types/data_sink_create_component_one.py +17 -0
  80. llama_cloud/types/data_sink_definition.py +41 -0
  81. llama_cloud/types/data_source.py +44 -0
  82. llama_cloud/types/data_source_component.py +7 -0
  83. llama_cloud/types/data_source_component_one.py +19 -0
  84. llama_cloud/types/data_source_create.py +40 -0
  85. llama_cloud/types/data_source_create_component.py +7 -0
  86. llama_cloud/types/data_source_create_component_one.py +19 -0
  87. llama_cloud/types/data_source_create_custom_metadata_value.py +7 -0
  88. llama_cloud/types/data_source_custom_metadata_value.py +7 -0
  89. llama_cloud/types/data_source_definition.py +41 -0
  90. llama_cloud/types/eval_dataset.py +37 -0
  91. llama_cloud/types/eval_dataset_job_params.py +36 -0
  92. llama_cloud/types/eval_dataset_job_record.py +59 -0
  93. llama_cloud/types/eval_execution_params.py +38 -0
  94. llama_cloud/types/eval_execution_params_override.py +38 -0
  95. llama_cloud/types/eval_llm_model_data.py +33 -0
  96. llama_cloud/types/eval_question.py +39 -0
  97. llama_cloud/types/eval_question_create.py +28 -0
  98. llama_cloud/types/eval_question_result.py +49 -0
  99. llama_cloud/types/file.py +46 -0
  100. llama_cloud/types/file_resource_info_value.py +5 -0
  101. llama_cloud/types/filter_condition.py +21 -0
  102. llama_cloud/types/filter_operator.py +65 -0
  103. llama_cloud/types/gemini_embedding.py +51 -0
  104. llama_cloud/types/html_node_parser.py +44 -0
  105. llama_cloud/types/http_validation_error.py +29 -0
  106. llama_cloud/types/hugging_face_inference_api_embedding.py +68 -0
  107. llama_cloud/types/hugging_face_inference_api_embedding_token.py +5 -0
  108. llama_cloud/types/json_node_parser.py +43 -0
  109. llama_cloud/types/llama_parse_supported_file_extensions.py +161 -0
  110. llama_cloud/types/llm.py +55 -0
  111. llama_cloud/types/local_eval.py +46 -0
  112. llama_cloud/types/local_eval_results.py +37 -0
  113. llama_cloud/types/local_eval_sets.py +30 -0
  114. llama_cloud/types/managed_ingestion_status.py +37 -0
  115. llama_cloud/types/markdown_element_node_parser.py +49 -0
  116. llama_cloud/types/markdown_node_parser.py +43 -0
  117. llama_cloud/types/message_role.py +45 -0
  118. llama_cloud/types/metadata_filter.py +41 -0
  119. llama_cloud/types/metadata_filter_value.py +5 -0
  120. llama_cloud/types/metadata_filters.py +41 -0
  121. llama_cloud/types/metadata_filters_filters_item.py +8 -0
  122. llama_cloud/types/metric_result.py +30 -0
  123. llama_cloud/types/node_parser.py +37 -0
  124. llama_cloud/types/object_type.py +33 -0
  125. llama_cloud/types/open_ai_embedding.py +73 -0
  126. llama_cloud/types/parser_languages.py +361 -0
  127. llama_cloud/types/parsing_history_item.py +36 -0
  128. llama_cloud/types/parsing_job.py +30 -0
  129. llama_cloud/types/parsing_job_json_result.py +29 -0
  130. llama_cloud/types/parsing_job_markdown_result.py +29 -0
  131. llama_cloud/types/parsing_job_text_result.py +29 -0
  132. llama_cloud/types/parsing_usage.py +29 -0
  133. llama_cloud/types/pipeline.py +64 -0
  134. llama_cloud/types/pipeline_create.py +61 -0
  135. llama_cloud/types/pipeline_data_source.py +46 -0
  136. llama_cloud/types/pipeline_data_source_component.py +7 -0
  137. llama_cloud/types/pipeline_data_source_component_one.py +19 -0
  138. llama_cloud/types/pipeline_data_source_create.py +32 -0
  139. llama_cloud/types/pipeline_data_source_custom_metadata_value.py +7 -0
  140. llama_cloud/types/pipeline_deployment.py +38 -0
  141. llama_cloud/types/pipeline_file.py +52 -0
  142. llama_cloud/types/pipeline_file_create.py +36 -0
  143. llama_cloud/types/pipeline_file_create_custom_metadata_value.py +7 -0
  144. llama_cloud/types/pipeline_file_custom_metadata_value.py +7 -0
  145. llama_cloud/types/pipeline_file_resource_info_value.py +7 -0
  146. llama_cloud/types/pipeline_file_status_response.py +35 -0
  147. llama_cloud/types/pipeline_type.py +21 -0
  148. llama_cloud/types/pooling.py +29 -0
  149. llama_cloud/types/preset_retrieval_params.py +40 -0
  150. llama_cloud/types/presigned_url.py +36 -0
  151. llama_cloud/types/project.py +42 -0
  152. llama_cloud/types/project_create.py +32 -0
  153. llama_cloud/types/prompt_mixin_prompts.py +36 -0
  154. llama_cloud/types/prompt_spec.py +35 -0
  155. llama_cloud/types/pydantic_program_mode.py +41 -0
  156. llama_cloud/types/related_node_info.py +37 -0
  157. llama_cloud/types/retrieve_results.py +40 -0
  158. llama_cloud/types/sentence_splitter.py +48 -0
  159. llama_cloud/types/simple_file_node_parser.py +44 -0
  160. llama_cloud/types/status_enum.py +33 -0
  161. llama_cloud/types/supported_eval_llm_model.py +35 -0
  162. llama_cloud/types/supported_eval_llm_model_names.py +29 -0
  163. llama_cloud/types/text_node.py +62 -0
  164. llama_cloud/types/text_node_relationships_value.py +7 -0
  165. llama_cloud/types/text_node_with_score.py +36 -0
  166. llama_cloud/types/token_text_splitter.py +43 -0
  167. llama_cloud/types/transformation_category_names.py +21 -0
  168. llama_cloud/types/validation_error.py +31 -0
  169. llama_cloud/types/validation_error_loc_item.py +5 -0
  170. llama_cloud-0.0.1.dist-info/LICENSE +21 -0
  171. llama_cloud-0.0.1.dist-info/METADATA +25 -0
  172. llama_cloud-0.0.1.dist-info/RECORD +173 -0
  173. llama_cloud-0.0.1.dist-info/WHEEL +4 -0
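
The generated docstrings in the diff below import from a placeholder `platform` package; the installed distribution itself is importable as `llama_cloud` (see llama_cloud/client.py in the file list above). A minimal, hedged sketch of driving the data-sinks client against the installed package name; the `PlatformApi` class name is carried over from the generated docstring examples and is an assumption about what llama_cloud/client.py actually exports:

# Hedged sketch, not taken verbatim from the package.
# Assumption: llama_cloud/client.py exposes the PlatformApi client named in the
# docstring examples below; only the module path is adjusted to the real package.
from llama_cloud.client import PlatformApi

client = PlatformApi(
    token="YOUR_TOKEN",
    base_url="https://yourhost.com/path/to/api",
)
sinks = client.data_sinks.list_data_sinks()
print([sink.name for sink in sinks])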
llama_cloud/resources/data_sinks/client.py
@@ -0,0 +1,506 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+import urllib.parse
+from json.decoder import JSONDecodeError
+
+from ...core.api_error import ApiError
+from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
+from ...core.jsonable_encoder import jsonable_encoder
+from ...core.remove_none_from_dict import remove_none_from_dict
+from ...errors.unprocessable_entity_error import UnprocessableEntityError
+from ...types.configurable_data_sink_names import ConfigurableDataSinkNames
+from ...types.data_sink import DataSink
+from ...types.data_sink_create import DataSinkCreate
+from ...types.http_validation_error import HttpValidationError
+from .types.data_sink_update_component import DataSinkUpdateComponent
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+# this is used as the default value for optional parameters
+OMIT = typing.cast(typing.Any, ...)
+
+
+class DataSinksClient:
+    def __init__(self, *, client_wrapper: SyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    def list_data_sinks(self, *, project_id: typing.Optional[str] = None) -> typing.List[DataSink]:
+        """
+        Get all data sinks for a given project.
+        If project_id is not provided, uses the default project.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.data_sinks.list_data_sinks()
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sinks"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[DataSink], _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def create_data_sink(self, *, project_id: typing.Optional[str] = None, request: DataSinkCreate) -> DataSink:
+        """
+        Create a new data sink.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - request: DataSinkCreate.
+        ---
+        from platform import ConfigurableDataSinkNames, DataSinkCreate
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.data_sinks.create_data_sink(
+            request=DataSinkCreate(
+                name="string",
+                sink_type=ConfigurableDataSinkNames.CHROMA,
+            ),
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sinks"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(DataSink, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def upsert_data_sink(self, *, project_id: typing.Optional[str] = None, request: DataSinkCreate) -> DataSink:
+        """
+        Upserts a data sink.
+        Updates if a data sink with the same name and project_id already exists. Otherwise, creates a new data sink.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - request: DataSinkCreate.
+        ---
+        from platform import ConfigurableDataSinkNames, DataSinkCreate
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.data_sinks.upsert_data_sink(
+            request=DataSinkCreate(
+                name="string",
+                sink_type=ConfigurableDataSinkNames.CHROMA,
+            ),
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sinks"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(DataSink, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def get_data_sink(self, data_sink_id: str) -> DataSink:
+        """
+        Get a data sink by ID.
+
+        Parameters:
+            - data_sink_id: str.
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.data_sinks.get_data_sink(
+            data_sink_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sinks/{data_sink_id}"),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(DataSink, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def update_data_sink(
+        self,
+        data_sink_id: str,
+        *,
+        name: typing.Optional[str] = OMIT,
+        sink_type: ConfigurableDataSinkNames,
+        component: typing.Optional[DataSinkUpdateComponent] = OMIT,
+    ) -> DataSink:
+        """
+        Update a data sink by ID.
+
+        Parameters:
+            - data_sink_id: str.
+
+            - name: typing.Optional[str]. The name of the data sink.
+
+            - sink_type: ConfigurableDataSinkNames.
+
+            - component: typing.Optional[DataSinkUpdateComponent].
+        ---
+        from platform import ConfigurableDataSinkNames
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.data_sinks.update_data_sink(
+            data_sink_id="string",
+            sink_type=ConfigurableDataSinkNames.CHROMA,
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"sink_type": sink_type}
+        if name is not OMIT:
+            _request["name"] = name
+        if component is not OMIT:
+            _request["component"] = component
+        _response = self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sinks/{data_sink_id}"),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(DataSink, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def delete_data_sink(self, data_sink_id: str) -> None:
+        """
+        Delete a data sink by ID.
+
+        Parameters:
+            - data_sink_id: str.
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.data_sinks.delete_data_sink(
+            data_sink_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "DELETE",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sinks/{data_sink_id}"),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+
+class AsyncDataSinksClient:
+    def __init__(self, *, client_wrapper: AsyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    async def list_data_sinks(self, *, project_id: typing.Optional[str] = None) -> typing.List[DataSink]:
+        """
+        Get all data sinks for a given project.
+        If project_id is not provided, uses the default project.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.data_sinks.list_data_sinks()
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sinks"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[DataSink], _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def create_data_sink(self, *, project_id: typing.Optional[str] = None, request: DataSinkCreate) -> DataSink:
+        """
+        Create a new data sink.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - request: DataSinkCreate.
+        ---
+        from platform import ConfigurableDataSinkNames, DataSinkCreate
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.data_sinks.create_data_sink(
+            request=DataSinkCreate(
+                name="string",
+                sink_type=ConfigurableDataSinkNames.CHROMA,
+            ),
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sinks"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(DataSink, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def upsert_data_sink(self, *, project_id: typing.Optional[str] = None, request: DataSinkCreate) -> DataSink:
+        """
+        Upserts a data sink.
+        Updates if a data sink with the same name and project_id already exists. Otherwise, creates a new data sink.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - request: DataSinkCreate.
+        ---
+        from platform import ConfigurableDataSinkNames, DataSinkCreate
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.data_sinks.upsert_data_sink(
+            request=DataSinkCreate(
+                name="string",
+                sink_type=ConfigurableDataSinkNames.CHROMA,
+            ),
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sinks"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            json=jsonable_encoder(request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(DataSink, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def get_data_sink(self, data_sink_id: str) -> DataSink:
+        """
+        Get a data sink by ID.
+
+        Parameters:
+            - data_sink_id: str.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.data_sinks.get_data_sink(
+            data_sink_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sinks/{data_sink_id}"),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(DataSink, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def update_data_sink(
+        self,
+        data_sink_id: str,
+        *,
+        name: typing.Optional[str] = OMIT,
+        sink_type: ConfigurableDataSinkNames,
+        component: typing.Optional[DataSinkUpdateComponent] = OMIT,
+    ) -> DataSink:
+        """
+        Update a data sink by ID.
+
+        Parameters:
+            - data_sink_id: str.
+
+            - name: typing.Optional[str]. The name of the data sink.
+
+            - sink_type: ConfigurableDataSinkNames.
+
+            - component: typing.Optional[DataSinkUpdateComponent].
+        ---
+        from platform import ConfigurableDataSinkNames
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.data_sinks.update_data_sink(
+            data_sink_id="string",
+            sink_type=ConfigurableDataSinkNames.CHROMA,
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"sink_type": sink_type}
+        if name is not OMIT:
+            _request["name"] = name
+        if component is not OMIT:
+            _request["component"] = component
+        _response = await self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sinks/{data_sink_id}"),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(DataSink, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def delete_data_sink(self, data_sink_id: str) -> None:
+        """
+        Delete a data sink by ID.
+
+        Parameters:
+            - data_sink_id: str.
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.data_sinks.delete_data_sink(
+            data_sink_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "DELETE",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sinks/{data_sink_id}"),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
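
Both update_data_sink implementations above distinguish "argument not supplied" from "argument explicitly set to None" with the OMIT sentinel (an Ellipsis cast to Any): fields left at OMIT never enter the request body, while an explicit None is serialized and sent. A self-contained sketch of the same pattern, using illustrative names rather than anything from the package:

import typing

OMIT = typing.cast(typing.Any, ...)  # the sentinel trick used by the generated client

def build_update_body(*, sink_type: str, name: typing.Optional[str] = OMIT) -> typing.Dict[str, typing.Any]:
    # Only arguments the caller actually passed end up in the JSON body.
    body: typing.Dict[str, typing.Any] = {"sink_type": sink_type}
    if name is not OMIT:
        body["name"] = name
    return body

print(build_update_body(sink_type="CHROMA"))                  # {'sink_type': 'CHROMA'}
print(build_update_body(sink_type="CHROMA", name=None))       # name is sent as null
print(build_update_body(sink_type="CHROMA", name="my-sink"))  # name is updated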
llama_cloud/resources/data_sinks/types/__init__.py
@@ -0,0 +1,6 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from .data_sink_update_component import DataSinkUpdateComponent
+from .data_sink_update_component_one import DataSinkUpdateComponentOne
+
+__all__ = ["DataSinkUpdateComponent", "DataSinkUpdateComponentOne"]
llama_cloud/resources/data_sinks/types/data_sink_update_component.py
@@ -0,0 +1,7 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+from .data_sink_update_component_one import DataSinkUpdateComponentOne
+
+DataSinkUpdateComponent = typing.Union[typing.Dict[str, typing.Any], DataSinkUpdateComponentOne]
llama_cloud/resources/data_sinks/types/data_sink_update_component_one.py
@@ -0,0 +1,17 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+from ....types.cloud_chroma_vector_store import CloudChromaVectorStore
+from ....types.cloud_pinecone_vector_store import CloudPineconeVectorStore
+from ....types.cloud_postgres_vector_store import CloudPostgresVectorStore
+from ....types.cloud_qdrant_vector_store import CloudQdrantVectorStore
+from ....types.cloud_weaviate_vector_store import CloudWeaviateVectorStore
+
+DataSinkUpdateComponentOne = typing.Union[
+    CloudChromaVectorStore,
+    CloudPineconeVectorStore,
+    CloudPostgresVectorStore,
+    CloudQdrantVectorStore,
+    CloudWeaviateVectorStore,
+]
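
Because DataSinkUpdateComponent is a union of a plain dict and the typed vector-store models above, a component update can be passed either way. A hedged sketch: the import path comes from the files in this diff, but the dict key is illustrative, not a confirmed field of CloudChromaVectorStore:

from llama_cloud.resources.data_sinks.types import DataSinkUpdateComponent

# A plain dict satisfies typing.Union[typing.Dict[str, typing.Any], DataSinkUpdateComponentOne].
# The key below is a placeholder; the real Chroma component fields are not shown in this diff.
component: DataSinkUpdateComponent = {"collection_name": "my_collection"}

# Passed straight through to update_data_sink (client setup as in the docstring examples above):
# client.data_sinks.update_data_sink(
#     data_sink_id="...",
#     sink_type=ConfigurableDataSinkNames.CHROMA,
#     component=component,
# )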
llama_cloud/resources/data_sources/__init__.py
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from .types import DataSourceUpdateComponent, DataSourceUpdateComponentOne, DataSourceUpdateCustomMetadataValue
+
+__all__ = ["DataSourceUpdateComponent", "DataSourceUpdateComponentOne", "DataSourceUpdateCustomMetadataValue"]