llama-cloud 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of llama-cloud might be problematic. Click here for more details.

Files changed (173) hide show
  1. llama_cloud/__init__.py +295 -0
  2. llama_cloud/client.py +72 -0
  3. llama_cloud/core/__init__.py +17 -0
  4. llama_cloud/core/api_error.py +15 -0
  5. llama_cloud/core/client_wrapper.py +51 -0
  6. llama_cloud/core/datetime_utils.py +28 -0
  7. llama_cloud/core/jsonable_encoder.py +103 -0
  8. llama_cloud/core/remove_none_from_dict.py +11 -0
  9. llama_cloud/errors/__init__.py +5 -0
  10. llama_cloud/errors/unprocessable_entity_error.py +9 -0
  11. llama_cloud/resources/__init__.py +40 -0
  12. llama_cloud/resources/api_keys/__init__.py +2 -0
  13. llama_cloud/resources/api_keys/client.py +302 -0
  14. llama_cloud/resources/billing/__init__.py +2 -0
  15. llama_cloud/resources/billing/client.py +234 -0
  16. llama_cloud/resources/component_definitions/__init__.py +2 -0
  17. llama_cloud/resources/component_definitions/client.py +192 -0
  18. llama_cloud/resources/data_sinks/__init__.py +5 -0
  19. llama_cloud/resources/data_sinks/client.py +506 -0
  20. llama_cloud/resources/data_sinks/types/__init__.py +6 -0
  21. llama_cloud/resources/data_sinks/types/data_sink_update_component.py +7 -0
  22. llama_cloud/resources/data_sinks/types/data_sink_update_component_one.py +17 -0
  23. llama_cloud/resources/data_sources/__init__.py +5 -0
  24. llama_cloud/resources/data_sources/client.py +521 -0
  25. llama_cloud/resources/data_sources/types/__init__.py +7 -0
  26. llama_cloud/resources/data_sources/types/data_source_update_component.py +7 -0
  27. llama_cloud/resources/data_sources/types/data_source_update_component_one.py +19 -0
  28. llama_cloud/resources/data_sources/types/data_source_update_custom_metadata_value.py +7 -0
  29. llama_cloud/resources/deprecated/__init__.py +2 -0
  30. llama_cloud/resources/deprecated/client.py +982 -0
  31. llama_cloud/resources/evals/__init__.py +2 -0
  32. llama_cloud/resources/evals/client.py +745 -0
  33. llama_cloud/resources/files/__init__.py +5 -0
  34. llama_cloud/resources/files/client.py +560 -0
  35. llama_cloud/resources/files/types/__init__.py +5 -0
  36. llama_cloud/resources/files/types/file_create_resource_info_value.py +5 -0
  37. llama_cloud/resources/parsing/__init__.py +2 -0
  38. llama_cloud/resources/parsing/client.py +982 -0
  39. llama_cloud/resources/pipelines/__init__.py +5 -0
  40. llama_cloud/resources/pipelines/client.py +2599 -0
  41. llama_cloud/resources/pipelines/types/__init__.py +5 -0
  42. llama_cloud/resources/pipelines/types/pipeline_file_update_custom_metadata_value.py +7 -0
  43. llama_cloud/resources/projects/__init__.py +2 -0
  44. llama_cloud/resources/projects/client.py +1231 -0
  45. llama_cloud/types/__init__.py +253 -0
  46. llama_cloud/types/api_key.py +37 -0
  47. llama_cloud/types/azure_open_ai_embedding.py +75 -0
  48. llama_cloud/types/base.py +26 -0
  49. llama_cloud/types/base_prompt_template.py +44 -0
  50. llama_cloud/types/bedrock_embedding.py +56 -0
  51. llama_cloud/types/chat_message.py +35 -0
  52. llama_cloud/types/cloud_az_storage_blob_data_source.py +40 -0
  53. llama_cloud/types/cloud_chroma_vector_store.py +40 -0
  54. llama_cloud/types/cloud_document.py +36 -0
  55. llama_cloud/types/cloud_document_create.py +36 -0
  56. llama_cloud/types/cloud_gcs_data_source.py +37 -0
  57. llama_cloud/types/cloud_google_drive_data_source.py +36 -0
  58. llama_cloud/types/cloud_one_drive_data_source.py +38 -0
  59. llama_cloud/types/cloud_pinecone_vector_store.py +46 -0
  60. llama_cloud/types/cloud_postgres_vector_store.py +44 -0
  61. llama_cloud/types/cloud_qdrant_vector_store.py +48 -0
  62. llama_cloud/types/cloud_s_3_data_source.py +42 -0
  63. llama_cloud/types/cloud_sharepoint_data_source.py +38 -0
  64. llama_cloud/types/cloud_weaviate_vector_store.py +38 -0
  65. llama_cloud/types/code_splitter.py +46 -0
  66. llama_cloud/types/cohere_embedding.py +46 -0
  67. llama_cloud/types/configurable_data_sink_names.py +37 -0
  68. llama_cloud/types/configurable_data_source_names.py +41 -0
  69. llama_cloud/types/configurable_transformation_definition.py +45 -0
  70. llama_cloud/types/configurable_transformation_names.py +73 -0
  71. llama_cloud/types/configured_transformation_item.py +43 -0
  72. llama_cloud/types/configured_transformation_item_component.py +9 -0
  73. llama_cloud/types/configured_transformation_item_component_one.py +35 -0
  74. llama_cloud/types/data_sink.py +40 -0
  75. llama_cloud/types/data_sink_component.py +7 -0
  76. llama_cloud/types/data_sink_component_one.py +17 -0
  77. llama_cloud/types/data_sink_create.py +36 -0
  78. llama_cloud/types/data_sink_create_component.py +7 -0
  79. llama_cloud/types/data_sink_create_component_one.py +17 -0
  80. llama_cloud/types/data_sink_definition.py +41 -0
  81. llama_cloud/types/data_source.py +44 -0
  82. llama_cloud/types/data_source_component.py +7 -0
  83. llama_cloud/types/data_source_component_one.py +19 -0
  84. llama_cloud/types/data_source_create.py +40 -0
  85. llama_cloud/types/data_source_create_component.py +7 -0
  86. llama_cloud/types/data_source_create_component_one.py +19 -0
  87. llama_cloud/types/data_source_create_custom_metadata_value.py +7 -0
  88. llama_cloud/types/data_source_custom_metadata_value.py +7 -0
  89. llama_cloud/types/data_source_definition.py +41 -0
  90. llama_cloud/types/eval_dataset.py +37 -0
  91. llama_cloud/types/eval_dataset_job_params.py +36 -0
  92. llama_cloud/types/eval_dataset_job_record.py +59 -0
  93. llama_cloud/types/eval_execution_params.py +38 -0
  94. llama_cloud/types/eval_execution_params_override.py +38 -0
  95. llama_cloud/types/eval_llm_model_data.py +33 -0
  96. llama_cloud/types/eval_question.py +39 -0
  97. llama_cloud/types/eval_question_create.py +28 -0
  98. llama_cloud/types/eval_question_result.py +49 -0
  99. llama_cloud/types/file.py +46 -0
  100. llama_cloud/types/file_resource_info_value.py +5 -0
  101. llama_cloud/types/filter_condition.py +21 -0
  102. llama_cloud/types/filter_operator.py +65 -0
  103. llama_cloud/types/gemini_embedding.py +51 -0
  104. llama_cloud/types/html_node_parser.py +44 -0
  105. llama_cloud/types/http_validation_error.py +29 -0
  106. llama_cloud/types/hugging_face_inference_api_embedding.py +68 -0
  107. llama_cloud/types/hugging_face_inference_api_embedding_token.py +5 -0
  108. llama_cloud/types/json_node_parser.py +43 -0
  109. llama_cloud/types/llama_parse_supported_file_extensions.py +161 -0
  110. llama_cloud/types/llm.py +55 -0
  111. llama_cloud/types/local_eval.py +46 -0
  112. llama_cloud/types/local_eval_results.py +37 -0
  113. llama_cloud/types/local_eval_sets.py +30 -0
  114. llama_cloud/types/managed_ingestion_status.py +37 -0
  115. llama_cloud/types/markdown_element_node_parser.py +49 -0
  116. llama_cloud/types/markdown_node_parser.py +43 -0
  117. llama_cloud/types/message_role.py +45 -0
  118. llama_cloud/types/metadata_filter.py +41 -0
  119. llama_cloud/types/metadata_filter_value.py +5 -0
  120. llama_cloud/types/metadata_filters.py +41 -0
  121. llama_cloud/types/metadata_filters_filters_item.py +8 -0
  122. llama_cloud/types/metric_result.py +30 -0
  123. llama_cloud/types/node_parser.py +37 -0
  124. llama_cloud/types/object_type.py +33 -0
  125. llama_cloud/types/open_ai_embedding.py +73 -0
  126. llama_cloud/types/parser_languages.py +361 -0
  127. llama_cloud/types/parsing_history_item.py +36 -0
  128. llama_cloud/types/parsing_job.py +30 -0
  129. llama_cloud/types/parsing_job_json_result.py +29 -0
  130. llama_cloud/types/parsing_job_markdown_result.py +29 -0
  131. llama_cloud/types/parsing_job_text_result.py +29 -0
  132. llama_cloud/types/parsing_usage.py +29 -0
  133. llama_cloud/types/pipeline.py +64 -0
  134. llama_cloud/types/pipeline_create.py +61 -0
  135. llama_cloud/types/pipeline_data_source.py +46 -0
  136. llama_cloud/types/pipeline_data_source_component.py +7 -0
  137. llama_cloud/types/pipeline_data_source_component_one.py +19 -0
  138. llama_cloud/types/pipeline_data_source_create.py +32 -0
  139. llama_cloud/types/pipeline_data_source_custom_metadata_value.py +7 -0
  140. llama_cloud/types/pipeline_deployment.py +38 -0
  141. llama_cloud/types/pipeline_file.py +52 -0
  142. llama_cloud/types/pipeline_file_create.py +36 -0
  143. llama_cloud/types/pipeline_file_create_custom_metadata_value.py +7 -0
  144. llama_cloud/types/pipeline_file_custom_metadata_value.py +7 -0
  145. llama_cloud/types/pipeline_file_resource_info_value.py +7 -0
  146. llama_cloud/types/pipeline_file_status_response.py +35 -0
  147. llama_cloud/types/pipeline_type.py +21 -0
  148. llama_cloud/types/pooling.py +29 -0
  149. llama_cloud/types/preset_retrieval_params.py +40 -0
  150. llama_cloud/types/presigned_url.py +36 -0
  151. llama_cloud/types/project.py +42 -0
  152. llama_cloud/types/project_create.py +32 -0
  153. llama_cloud/types/prompt_mixin_prompts.py +36 -0
  154. llama_cloud/types/prompt_spec.py +35 -0
  155. llama_cloud/types/pydantic_program_mode.py +41 -0
  156. llama_cloud/types/related_node_info.py +37 -0
  157. llama_cloud/types/retrieve_results.py +40 -0
  158. llama_cloud/types/sentence_splitter.py +48 -0
  159. llama_cloud/types/simple_file_node_parser.py +44 -0
  160. llama_cloud/types/status_enum.py +33 -0
  161. llama_cloud/types/supported_eval_llm_model.py +35 -0
  162. llama_cloud/types/supported_eval_llm_model_names.py +29 -0
  163. llama_cloud/types/text_node.py +62 -0
  164. llama_cloud/types/text_node_relationships_value.py +7 -0
  165. llama_cloud/types/text_node_with_score.py +36 -0
  166. llama_cloud/types/token_text_splitter.py +43 -0
  167. llama_cloud/types/transformation_category_names.py +21 -0
  168. llama_cloud/types/validation_error.py +31 -0
  169. llama_cloud/types/validation_error_loc_item.py +5 -0
  170. llama_cloud-0.0.1.dist-info/LICENSE +21 -0
  171. llama_cloud-0.0.1.dist-info/METADATA +25 -0
  172. llama_cloud-0.0.1.dist-info/RECORD +173 -0
  173. llama_cloud-0.0.1.dist-info/WHEEL +4 -0
@@ -0,0 +1,521 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+
3
+ import typing
4
+ import urllib.parse
5
+ from json.decoder import JSONDecodeError
6
+
7
+ from ...core.api_error import ApiError
8
+ from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
9
+ from ...core.jsonable_encoder import jsonable_encoder
10
+ from ...core.remove_none_from_dict import remove_none_from_dict
11
+ from ...errors.unprocessable_entity_error import UnprocessableEntityError
12
+ from ...types.configurable_data_source_names import ConfigurableDataSourceNames
13
+ from ...types.data_source import DataSource
14
+ from ...types.data_source_create import DataSourceCreate
15
+ from ...types.http_validation_error import HttpValidationError
16
+ from .types.data_source_update_component import DataSourceUpdateComponent
17
+ from .types.data_source_update_custom_metadata_value import DataSourceUpdateCustomMetadataValue
18
+
19
+ try:
20
+ import pydantic.v1 as pydantic # type: ignore
21
+ except ImportError:
22
+ import pydantic # type: ignore
23
+
24
+ # this is used as the default value for optional parameters
25
+ OMIT = typing.cast(typing.Any, ...)
26
+
27
+
28
+ class DataSourcesClient:
29
+ def __init__(self, *, client_wrapper: SyncClientWrapper):
30
+ self._client_wrapper = client_wrapper
31
+
32
+ def list_data_sources(self, *, project_id: typing.Optional[str] = None) -> typing.List[DataSource]:
33
+ """
34
+ Get all data sources for a given project.
35
+ If project_id is not provided, uses the default project.
36
+
37
+ Parameters:
38
+ - project_id: typing.Optional[str].
39
+ ---
40
+ from platform.client import PlatformApi
41
+
42
+ client = PlatformApi(
43
+ token="YOUR_TOKEN",
44
+ base_url="https://yourhost.com/path/to/api",
45
+ )
46
+ client.data_sources.list_data_sources()
47
+ """
48
+ _response = self._client_wrapper.httpx_client.request(
49
+ "GET",
50
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
51
+ params=remove_none_from_dict({"project_id": project_id}),
52
+ headers=self._client_wrapper.get_headers(),
53
+ timeout=60,
54
+ )
55
+ if 200 <= _response.status_code < 300:
56
+ return pydantic.parse_obj_as(typing.List[DataSource], _response.json()) # type: ignore
57
+ if _response.status_code == 422:
58
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
59
+ try:
60
+ _response_json = _response.json()
61
+ except JSONDecodeError:
62
+ raise ApiError(status_code=_response.status_code, body=_response.text)
63
+ raise ApiError(status_code=_response.status_code, body=_response_json)
64
+
65
+ def create_data_source(self, *, project_id: typing.Optional[str] = None, request: DataSourceCreate) -> DataSource:
66
+ """
67
+ Create a new data source.
68
+
69
+ Parameters:
70
+ - project_id: typing.Optional[str].
71
+
72
+ - request: DataSourceCreate.
73
+ ---
74
+ from platform import ConfigurableDataSourceNames, DataSourceCreate
75
+ from platform.client import PlatformApi
76
+
77
+ client = PlatformApi(
78
+ token="YOUR_TOKEN",
79
+ base_url="https://yourhost.com/path/to/api",
80
+ )
81
+ client.data_sources.create_data_source(
82
+ request=DataSourceCreate(
83
+ name="string",
84
+ source_type=ConfigurableDataSourceNames.S_3,
85
+ ),
86
+ )
87
+ """
88
+ _response = self._client_wrapper.httpx_client.request(
89
+ "POST",
90
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
91
+ params=remove_none_from_dict({"project_id": project_id}),
92
+ json=jsonable_encoder(request),
93
+ headers=self._client_wrapper.get_headers(),
94
+ timeout=60,
95
+ )
96
+ if 200 <= _response.status_code < 300:
97
+ return pydantic.parse_obj_as(DataSource, _response.json()) # type: ignore
98
+ if _response.status_code == 422:
99
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
100
+ try:
101
+ _response_json = _response.json()
102
+ except JSONDecodeError:
103
+ raise ApiError(status_code=_response.status_code, body=_response.text)
104
+ raise ApiError(status_code=_response.status_code, body=_response_json)
105
+
106
+ def upsert_data_source(self, *, project_id: typing.Optional[str] = None, request: DataSourceCreate) -> DataSource:
107
+ """
108
+ Upserts a data source.
109
+ Updates if a data source with the same name and project_id already exists. Otherwise, creates a new data source.
110
+
111
+ Parameters:
112
+ - project_id: typing.Optional[str].
113
+
114
+ - request: DataSourceCreate.
115
+ ---
116
+ from platform import ConfigurableDataSourceNames, DataSourceCreate
117
+ from platform.client import PlatformApi
118
+
119
+ client = PlatformApi(
120
+ token="YOUR_TOKEN",
121
+ base_url="https://yourhost.com/path/to/api",
122
+ )
123
+ client.data_sources.upsert_data_source(
124
+ request=DataSourceCreate(
125
+ name="string",
126
+ source_type=ConfigurableDataSourceNames.S_3,
127
+ ),
128
+ )
129
+ """
130
+ _response = self._client_wrapper.httpx_client.request(
131
+ "PUT",
132
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
133
+ params=remove_none_from_dict({"project_id": project_id}),
134
+ json=jsonable_encoder(request),
135
+ headers=self._client_wrapper.get_headers(),
136
+ timeout=60,
137
+ )
138
+ if 200 <= _response.status_code < 300:
139
+ return pydantic.parse_obj_as(DataSource, _response.json()) # type: ignore
140
+ if _response.status_code == 422:
141
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
142
+ try:
143
+ _response_json = _response.json()
144
+ except JSONDecodeError:
145
+ raise ApiError(status_code=_response.status_code, body=_response.text)
146
+ raise ApiError(status_code=_response.status_code, body=_response_json)
147
+
148
+ def get_data_source(self, data_source_id: str) -> DataSource:
149
+ """
150
+ Get a data source by ID.
151
+
152
+ Parameters:
153
+ - data_source_id: str.
154
+ ---
155
+ from platform.client import PlatformApi
156
+
157
+ client = PlatformApi(
158
+ token="YOUR_TOKEN",
159
+ base_url="https://yourhost.com/path/to/api",
160
+ )
161
+ client.data_sources.get_data_source(
162
+ data_source_id="string",
163
+ )
164
+ """
165
+ _response = self._client_wrapper.httpx_client.request(
166
+ "GET",
167
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sources/{data_source_id}"),
168
+ headers=self._client_wrapper.get_headers(),
169
+ timeout=60,
170
+ )
171
+ if 200 <= _response.status_code < 300:
172
+ return pydantic.parse_obj_as(DataSource, _response.json()) # type: ignore
173
+ if _response.status_code == 422:
174
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
175
+ try:
176
+ _response_json = _response.json()
177
+ except JSONDecodeError:
178
+ raise ApiError(status_code=_response.status_code, body=_response.text)
179
+ raise ApiError(status_code=_response.status_code, body=_response_json)
180
+
181
+ def update_data_source(
182
+ self,
183
+ data_source_id: str,
184
+ *,
185
+ name: typing.Optional[str] = OMIT,
186
+ source_type: ConfigurableDataSourceNames,
187
+ custom_metadata: typing.Optional[typing.Dict[str, DataSourceUpdateCustomMetadataValue]] = OMIT,
188
+ component: typing.Optional[DataSourceUpdateComponent] = OMIT,
189
+ ) -> DataSource:
190
+ """
191
+ Update a data source by ID.
192
+
193
+ Parameters:
194
+ - data_source_id: str.
195
+
196
+ - name: typing.Optional[str]. The name of the data source.
197
+
198
+ - source_type: ConfigurableDataSourceNames.
199
+
200
+ - custom_metadata: typing.Optional[typing.Dict[str, DataSourceUpdateCustomMetadataValue]]. Custom metadata that will be present on all data loaded from the data source
201
+
202
+ - component: typing.Optional[DataSourceUpdateComponent].
203
+ ---
204
+ from platform import ConfigurableDataSourceNames
205
+ from platform.client import PlatformApi
206
+
207
+ client = PlatformApi(
208
+ token="YOUR_TOKEN",
209
+ base_url="https://yourhost.com/path/to/api",
210
+ )
211
+ client.data_sources.update_data_source(
212
+ data_source_id="string",
213
+ source_type=ConfigurableDataSourceNames.S_3,
214
+ )
215
+ """
216
+ _request: typing.Dict[str, typing.Any] = {"source_type": source_type}
217
+ if name is not OMIT:
218
+ _request["name"] = name
219
+ if custom_metadata is not OMIT:
220
+ _request["custom_metadata"] = custom_metadata
221
+ if component is not OMIT:
222
+ _request["component"] = component
223
+ _response = self._client_wrapper.httpx_client.request(
224
+ "PUT",
225
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sources/{data_source_id}"),
226
+ json=jsonable_encoder(_request),
227
+ headers=self._client_wrapper.get_headers(),
228
+ timeout=60,
229
+ )
230
+ if 200 <= _response.status_code < 300:
231
+ return pydantic.parse_obj_as(DataSource, _response.json()) # type: ignore
232
+ if _response.status_code == 422:
233
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
234
+ try:
235
+ _response_json = _response.json()
236
+ except JSONDecodeError:
237
+ raise ApiError(status_code=_response.status_code, body=_response.text)
238
+ raise ApiError(status_code=_response.status_code, body=_response_json)
239
+
240
+ def delete_data_source(self, data_source_id: str) -> None:
241
+ """
242
+ Delete a data source by ID.
243
+
244
+ Parameters:
245
+ - data_source_id: str.
246
+ ---
247
+ from platform.client import PlatformApi
248
+
249
+ client = PlatformApi(
250
+ token="YOUR_TOKEN",
251
+ base_url="https://yourhost.com/path/to/api",
252
+ )
253
+ client.data_sources.delete_data_source(
254
+ data_source_id="string",
255
+ )
256
+ """
257
+ _response = self._client_wrapper.httpx_client.request(
258
+ "DELETE",
259
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sources/{data_source_id}"),
260
+ headers=self._client_wrapper.get_headers(),
261
+ timeout=60,
262
+ )
263
+ if 200 <= _response.status_code < 300:
264
+ return
265
+ if _response.status_code == 422:
266
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
267
+ try:
268
+ _response_json = _response.json()
269
+ except JSONDecodeError:
270
+ raise ApiError(status_code=_response.status_code, body=_response.text)
271
+ raise ApiError(status_code=_response.status_code, body=_response_json)
272
+
273
+
274
+ class AsyncDataSourcesClient:
275
+ def __init__(self, *, client_wrapper: AsyncClientWrapper):
276
+ self._client_wrapper = client_wrapper
277
+
278
+ async def list_data_sources(self, *, project_id: typing.Optional[str] = None) -> typing.List[DataSource]:
279
+ """
280
+ Get all data sources for a given project.
281
+ If project_id is not provided, uses the default project.
282
+
283
+ Parameters:
284
+ - project_id: typing.Optional[str].
285
+ ---
286
+ from platform.client import AsyncPlatformApi
287
+
288
+ client = AsyncPlatformApi(
289
+ token="YOUR_TOKEN",
290
+ base_url="https://yourhost.com/path/to/api",
291
+ )
292
+ await client.data_sources.list_data_sources()
293
+ """
294
+ _response = await self._client_wrapper.httpx_client.request(
295
+ "GET",
296
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
297
+ params=remove_none_from_dict({"project_id": project_id}),
298
+ headers=self._client_wrapper.get_headers(),
299
+ timeout=60,
300
+ )
301
+ if 200 <= _response.status_code < 300:
302
+ return pydantic.parse_obj_as(typing.List[DataSource], _response.json()) # type: ignore
303
+ if _response.status_code == 422:
304
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
305
+ try:
306
+ _response_json = _response.json()
307
+ except JSONDecodeError:
308
+ raise ApiError(status_code=_response.status_code, body=_response.text)
309
+ raise ApiError(status_code=_response.status_code, body=_response_json)
310
+
311
+ async def create_data_source(
312
+ self, *, project_id: typing.Optional[str] = None, request: DataSourceCreate
313
+ ) -> DataSource:
314
+ """
315
+ Create a new data source.
316
+
317
+ Parameters:
318
+ - project_id: typing.Optional[str].
319
+
320
+ - request: DataSourceCreate.
321
+ ---
322
+ from platform import ConfigurableDataSourceNames, DataSourceCreate
323
+ from platform.client import AsyncPlatformApi
324
+
325
+ client = AsyncPlatformApi(
326
+ token="YOUR_TOKEN",
327
+ base_url="https://yourhost.com/path/to/api",
328
+ )
329
+ await client.data_sources.create_data_source(
330
+ request=DataSourceCreate(
331
+ name="string",
332
+ source_type=ConfigurableDataSourceNames.S_3,
333
+ ),
334
+ )
335
+ """
336
+ _response = await self._client_wrapper.httpx_client.request(
337
+ "POST",
338
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
339
+ params=remove_none_from_dict({"project_id": project_id}),
340
+ json=jsonable_encoder(request),
341
+ headers=self._client_wrapper.get_headers(),
342
+ timeout=60,
343
+ )
344
+ if 200 <= _response.status_code < 300:
345
+ return pydantic.parse_obj_as(DataSource, _response.json()) # type: ignore
346
+ if _response.status_code == 422:
347
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
348
+ try:
349
+ _response_json = _response.json()
350
+ except JSONDecodeError:
351
+ raise ApiError(status_code=_response.status_code, body=_response.text)
352
+ raise ApiError(status_code=_response.status_code, body=_response_json)
353
+
354
+ async def upsert_data_source(
355
+ self, *, project_id: typing.Optional[str] = None, request: DataSourceCreate
356
+ ) -> DataSource:
357
+ """
358
+ Upserts a data source.
359
+ Updates if a data source with the same name and project_id already exists. Otherwise, creates a new data source.
360
+
361
+ Parameters:
362
+ - project_id: typing.Optional[str].
363
+
364
+ - request: DataSourceCreate.
365
+ ---
366
+ from platform import ConfigurableDataSourceNames, DataSourceCreate
367
+ from platform.client import AsyncPlatformApi
368
+
369
+ client = AsyncPlatformApi(
370
+ token="YOUR_TOKEN",
371
+ base_url="https://yourhost.com/path/to/api",
372
+ )
373
+ await client.data_sources.upsert_data_source(
374
+ request=DataSourceCreate(
375
+ name="string",
376
+ source_type=ConfigurableDataSourceNames.S_3,
377
+ ),
378
+ )
379
+ """
380
+ _response = await self._client_wrapper.httpx_client.request(
381
+ "PUT",
382
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/data-sources"),
383
+ params=remove_none_from_dict({"project_id": project_id}),
384
+ json=jsonable_encoder(request),
385
+ headers=self._client_wrapper.get_headers(),
386
+ timeout=60,
387
+ )
388
+ if 200 <= _response.status_code < 300:
389
+ return pydantic.parse_obj_as(DataSource, _response.json()) # type: ignore
390
+ if _response.status_code == 422:
391
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
392
+ try:
393
+ _response_json = _response.json()
394
+ except JSONDecodeError:
395
+ raise ApiError(status_code=_response.status_code, body=_response.text)
396
+ raise ApiError(status_code=_response.status_code, body=_response_json)
397
+
398
+ async def get_data_source(self, data_source_id: str) -> DataSource:
399
+ """
400
+ Get a data source by ID.
401
+
402
+ Parameters:
403
+ - data_source_id: str.
404
+ ---
405
+ from platform.client import AsyncPlatformApi
406
+
407
+ client = AsyncPlatformApi(
408
+ token="YOUR_TOKEN",
409
+ base_url="https://yourhost.com/path/to/api",
410
+ )
411
+ await client.data_sources.get_data_source(
412
+ data_source_id="string",
413
+ )
414
+ """
415
+ _response = await self._client_wrapper.httpx_client.request(
416
+ "GET",
417
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sources/{data_source_id}"),
418
+ headers=self._client_wrapper.get_headers(),
419
+ timeout=60,
420
+ )
421
+ if 200 <= _response.status_code < 300:
422
+ return pydantic.parse_obj_as(DataSource, _response.json()) # type: ignore
423
+ if _response.status_code == 422:
424
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
425
+ try:
426
+ _response_json = _response.json()
427
+ except JSONDecodeError:
428
+ raise ApiError(status_code=_response.status_code, body=_response.text)
429
+ raise ApiError(status_code=_response.status_code, body=_response_json)
430
+
431
+ async def update_data_source(
432
+ self,
433
+ data_source_id: str,
434
+ *,
435
+ name: typing.Optional[str] = OMIT,
436
+ source_type: ConfigurableDataSourceNames,
437
+ custom_metadata: typing.Optional[typing.Dict[str, DataSourceUpdateCustomMetadataValue]] = OMIT,
438
+ component: typing.Optional[DataSourceUpdateComponent] = OMIT,
439
+ ) -> DataSource:
440
+ """
441
+ Update a data source by ID.
442
+
443
+ Parameters:
444
+ - data_source_id: str.
445
+
446
+ - name: typing.Optional[str]. The name of the data source.
447
+
448
+ - source_type: ConfigurableDataSourceNames.
449
+
450
+ - custom_metadata: typing.Optional[typing.Dict[str, DataSourceUpdateCustomMetadataValue]]. Custom metadata that will be present on all data loaded from the data source
451
+
452
+ - component: typing.Optional[DataSourceUpdateComponent].
453
+ ---
454
+ from platform import ConfigurableDataSourceNames
455
+ from platform.client import AsyncPlatformApi
456
+
457
+ client = AsyncPlatformApi(
458
+ token="YOUR_TOKEN",
459
+ base_url="https://yourhost.com/path/to/api",
460
+ )
461
+ await client.data_sources.update_data_source(
462
+ data_source_id="string",
463
+ source_type=ConfigurableDataSourceNames.S_3,
464
+ )
465
+ """
466
+ _request: typing.Dict[str, typing.Any] = {"source_type": source_type}
467
+ if name is not OMIT:
468
+ _request["name"] = name
469
+ if custom_metadata is not OMIT:
470
+ _request["custom_metadata"] = custom_metadata
471
+ if component is not OMIT:
472
+ _request["component"] = component
473
+ _response = await self._client_wrapper.httpx_client.request(
474
+ "PUT",
475
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sources/{data_source_id}"),
476
+ json=jsonable_encoder(_request),
477
+ headers=self._client_wrapper.get_headers(),
478
+ timeout=60,
479
+ )
480
+ if 200 <= _response.status_code < 300:
481
+ return pydantic.parse_obj_as(DataSource, _response.json()) # type: ignore
482
+ if _response.status_code == 422:
483
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
484
+ try:
485
+ _response_json = _response.json()
486
+ except JSONDecodeError:
487
+ raise ApiError(status_code=_response.status_code, body=_response.text)
488
+ raise ApiError(status_code=_response.status_code, body=_response_json)
489
+
490
+ async def delete_data_source(self, data_source_id: str) -> None:
491
+ """
492
+ Delete a data source by ID.
493
+
494
+ Parameters:
495
+ - data_source_id: str.
496
+ ---
497
+ from platform.client import AsyncPlatformApi
498
+
499
+ client = AsyncPlatformApi(
500
+ token="YOUR_TOKEN",
501
+ base_url="https://yourhost.com/path/to/api",
502
+ )
503
+ await client.data_sources.delete_data_source(
504
+ data_source_id="string",
505
+ )
506
+ """
507
+ _response = await self._client_wrapper.httpx_client.request(
508
+ "DELETE",
509
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/data-sources/{data_source_id}"),
510
+ headers=self._client_wrapper.get_headers(),
511
+ timeout=60,
512
+ )
513
+ if 200 <= _response.status_code < 300:
514
+ return
515
+ if _response.status_code == 422:
516
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
517
+ try:
518
+ _response_json = _response.json()
519
+ except JSONDecodeError:
520
+ raise ApiError(status_code=_response.status_code, body=_response.text)
521
+ raise ApiError(status_code=_response.status_code, body=_response_json)
@@ -0,0 +1,7 @@
# This file was auto-generated by Fern from our API Definition.

# Re-export the data_sources request-type aliases so callers can import them
# from the package rather than from the individual modules.
from .data_source_update_component import DataSourceUpdateComponent
from .data_source_update_component_one import DataSourceUpdateComponentOne
from .data_source_update_custom_metadata_value import DataSourceUpdateCustomMetadataValue

__all__ = ["DataSourceUpdateComponent", "DataSourceUpdateComponentOne", "DataSourceUpdateCustomMetadataValue"]
@@ -0,0 +1,7 @@
1
# This file was auto-generated by Fern from our API Definition.

import typing

from .data_source_update_component_one import DataSourceUpdateComponentOne

# A data-source component may be supplied either as a raw ``dict`` payload or
# as one of the typed configurations collected in DataSourceUpdateComponentOne.
# NOTE(review): member order matters — pydantic tries Union members in order,
# so the permissive dict form is attempted first.
DataSourceUpdateComponent = typing.Union[typing.Dict[str, typing.Any], DataSourceUpdateComponentOne]
@@ -0,0 +1,19 @@
1
# This file was auto-generated by Fern from our API Definition.

import typing

from ....types.cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
from ....types.cloud_gcs_data_source import CloudGcsDataSource
from ....types.cloud_google_drive_data_source import CloudGoogleDriveDataSource
from ....types.cloud_one_drive_data_source import CloudOneDriveDataSource
from ....types.cloud_s_3_data_source import CloudS3DataSource
from ....types.cloud_sharepoint_data_source import CloudSharepointDataSource

# Union of every typed data-source configuration accepted by the
# data-source update endpoint (S3, Azure Blob, GCS, Google Drive,
# OneDrive, SharePoint).
DataSourceUpdateComponentOne = typing.Union[
    CloudS3DataSource,
    CloudAzStorageBlobDataSource,
    CloudGcsDataSource,
    CloudGoogleDriveDataSource,
    CloudOneDriveDataSource,
    CloudSharepointDataSource,
]
@@ -0,0 +1,7 @@
1
# This file was auto-generated by Fern from our API Definition.

import typing

# JSON-compatible value types allowed in a data source's custom_metadata
# mapping: nested objects, arrays, strings, numbers, and booleans.
DataSourceUpdateCustomMetadataValue = typing.Union[
    typing.Dict[str, typing.Any], typing.List[typing.Any], str, int, float, bool
]
@@ -0,0 +1,2 @@
1
+ # This file was auto-generated by Fern from our API Definition.
2
+