llama-cloud 0.0.1 (llama_cloud-0.0.1-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of llama-cloud might be problematic.

Files changed (173)
  1. llama_cloud/__init__.py +295 -0
  2. llama_cloud/client.py +72 -0
  3. llama_cloud/core/__init__.py +17 -0
  4. llama_cloud/core/api_error.py +15 -0
  5. llama_cloud/core/client_wrapper.py +51 -0
  6. llama_cloud/core/datetime_utils.py +28 -0
  7. llama_cloud/core/jsonable_encoder.py +103 -0
  8. llama_cloud/core/remove_none_from_dict.py +11 -0
  9. llama_cloud/errors/__init__.py +5 -0
  10. llama_cloud/errors/unprocessable_entity_error.py +9 -0
  11. llama_cloud/resources/__init__.py +40 -0
  12. llama_cloud/resources/api_keys/__init__.py +2 -0
  13. llama_cloud/resources/api_keys/client.py +302 -0
  14. llama_cloud/resources/billing/__init__.py +2 -0
  15. llama_cloud/resources/billing/client.py +234 -0
  16. llama_cloud/resources/component_definitions/__init__.py +2 -0
  17. llama_cloud/resources/component_definitions/client.py +192 -0
  18. llama_cloud/resources/data_sinks/__init__.py +5 -0
  19. llama_cloud/resources/data_sinks/client.py +506 -0
  20. llama_cloud/resources/data_sinks/types/__init__.py +6 -0
  21. llama_cloud/resources/data_sinks/types/data_sink_update_component.py +7 -0
  22. llama_cloud/resources/data_sinks/types/data_sink_update_component_one.py +17 -0
  23. llama_cloud/resources/data_sources/__init__.py +5 -0
  24. llama_cloud/resources/data_sources/client.py +521 -0
  25. llama_cloud/resources/data_sources/types/__init__.py +7 -0
  26. llama_cloud/resources/data_sources/types/data_source_update_component.py +7 -0
  27. llama_cloud/resources/data_sources/types/data_source_update_component_one.py +19 -0
  28. llama_cloud/resources/data_sources/types/data_source_update_custom_metadata_value.py +7 -0
  29. llama_cloud/resources/deprecated/__init__.py +2 -0
  30. llama_cloud/resources/deprecated/client.py +982 -0
  31. llama_cloud/resources/evals/__init__.py +2 -0
  32. llama_cloud/resources/evals/client.py +745 -0
  33. llama_cloud/resources/files/__init__.py +5 -0
  34. llama_cloud/resources/files/client.py +560 -0
  35. llama_cloud/resources/files/types/__init__.py +5 -0
  36. llama_cloud/resources/files/types/file_create_resource_info_value.py +5 -0
  37. llama_cloud/resources/parsing/__init__.py +2 -0
  38. llama_cloud/resources/parsing/client.py +982 -0
  39. llama_cloud/resources/pipelines/__init__.py +5 -0
  40. llama_cloud/resources/pipelines/client.py +2599 -0
  41. llama_cloud/resources/pipelines/types/__init__.py +5 -0
  42. llama_cloud/resources/pipelines/types/pipeline_file_update_custom_metadata_value.py +7 -0
  43. llama_cloud/resources/projects/__init__.py +2 -0
  44. llama_cloud/resources/projects/client.py +1231 -0
  45. llama_cloud/types/__init__.py +253 -0
  46. llama_cloud/types/api_key.py +37 -0
  47. llama_cloud/types/azure_open_ai_embedding.py +75 -0
  48. llama_cloud/types/base.py +26 -0
  49. llama_cloud/types/base_prompt_template.py +44 -0
  50. llama_cloud/types/bedrock_embedding.py +56 -0
  51. llama_cloud/types/chat_message.py +35 -0
  52. llama_cloud/types/cloud_az_storage_blob_data_source.py +40 -0
  53. llama_cloud/types/cloud_chroma_vector_store.py +40 -0
  54. llama_cloud/types/cloud_document.py +36 -0
  55. llama_cloud/types/cloud_document_create.py +36 -0
  56. llama_cloud/types/cloud_gcs_data_source.py +37 -0
  57. llama_cloud/types/cloud_google_drive_data_source.py +36 -0
  58. llama_cloud/types/cloud_one_drive_data_source.py +38 -0
  59. llama_cloud/types/cloud_pinecone_vector_store.py +46 -0
  60. llama_cloud/types/cloud_postgres_vector_store.py +44 -0
  61. llama_cloud/types/cloud_qdrant_vector_store.py +48 -0
  62. llama_cloud/types/cloud_s_3_data_source.py +42 -0
  63. llama_cloud/types/cloud_sharepoint_data_source.py +38 -0
  64. llama_cloud/types/cloud_weaviate_vector_store.py +38 -0
  65. llama_cloud/types/code_splitter.py +46 -0
  66. llama_cloud/types/cohere_embedding.py +46 -0
  67. llama_cloud/types/configurable_data_sink_names.py +37 -0
  68. llama_cloud/types/configurable_data_source_names.py +41 -0
  69. llama_cloud/types/configurable_transformation_definition.py +45 -0
  70. llama_cloud/types/configurable_transformation_names.py +73 -0
  71. llama_cloud/types/configured_transformation_item.py +43 -0
  72. llama_cloud/types/configured_transformation_item_component.py +9 -0
  73. llama_cloud/types/configured_transformation_item_component_one.py +35 -0
  74. llama_cloud/types/data_sink.py +40 -0
  75. llama_cloud/types/data_sink_component.py +7 -0
  76. llama_cloud/types/data_sink_component_one.py +17 -0
  77. llama_cloud/types/data_sink_create.py +36 -0
  78. llama_cloud/types/data_sink_create_component.py +7 -0
  79. llama_cloud/types/data_sink_create_component_one.py +17 -0
  80. llama_cloud/types/data_sink_definition.py +41 -0
  81. llama_cloud/types/data_source.py +44 -0
  82. llama_cloud/types/data_source_component.py +7 -0
  83. llama_cloud/types/data_source_component_one.py +19 -0
  84. llama_cloud/types/data_source_create.py +40 -0
  85. llama_cloud/types/data_source_create_component.py +7 -0
  86. llama_cloud/types/data_source_create_component_one.py +19 -0
  87. llama_cloud/types/data_source_create_custom_metadata_value.py +7 -0
  88. llama_cloud/types/data_source_custom_metadata_value.py +7 -0
  89. llama_cloud/types/data_source_definition.py +41 -0
  90. llama_cloud/types/eval_dataset.py +37 -0
  91. llama_cloud/types/eval_dataset_job_params.py +36 -0
  92. llama_cloud/types/eval_dataset_job_record.py +59 -0
  93. llama_cloud/types/eval_execution_params.py +38 -0
  94. llama_cloud/types/eval_execution_params_override.py +38 -0
  95. llama_cloud/types/eval_llm_model_data.py +33 -0
  96. llama_cloud/types/eval_question.py +39 -0
  97. llama_cloud/types/eval_question_create.py +28 -0
  98. llama_cloud/types/eval_question_result.py +49 -0
  99. llama_cloud/types/file.py +46 -0
  100. llama_cloud/types/file_resource_info_value.py +5 -0
  101. llama_cloud/types/filter_condition.py +21 -0
  102. llama_cloud/types/filter_operator.py +65 -0
  103. llama_cloud/types/gemini_embedding.py +51 -0
  104. llama_cloud/types/html_node_parser.py +44 -0
  105. llama_cloud/types/http_validation_error.py +29 -0
  106. llama_cloud/types/hugging_face_inference_api_embedding.py +68 -0
  107. llama_cloud/types/hugging_face_inference_api_embedding_token.py +5 -0
  108. llama_cloud/types/json_node_parser.py +43 -0
  109. llama_cloud/types/llama_parse_supported_file_extensions.py +161 -0
  110. llama_cloud/types/llm.py +55 -0
  111. llama_cloud/types/local_eval.py +46 -0
  112. llama_cloud/types/local_eval_results.py +37 -0
  113. llama_cloud/types/local_eval_sets.py +30 -0
  114. llama_cloud/types/managed_ingestion_status.py +37 -0
  115. llama_cloud/types/markdown_element_node_parser.py +49 -0
  116. llama_cloud/types/markdown_node_parser.py +43 -0
  117. llama_cloud/types/message_role.py +45 -0
  118. llama_cloud/types/metadata_filter.py +41 -0
  119. llama_cloud/types/metadata_filter_value.py +5 -0
  120. llama_cloud/types/metadata_filters.py +41 -0
  121. llama_cloud/types/metadata_filters_filters_item.py +8 -0
  122. llama_cloud/types/metric_result.py +30 -0
  123. llama_cloud/types/node_parser.py +37 -0
  124. llama_cloud/types/object_type.py +33 -0
  125. llama_cloud/types/open_ai_embedding.py +73 -0
  126. llama_cloud/types/parser_languages.py +361 -0
  127. llama_cloud/types/parsing_history_item.py +36 -0
  128. llama_cloud/types/parsing_job.py +30 -0
  129. llama_cloud/types/parsing_job_json_result.py +29 -0
  130. llama_cloud/types/parsing_job_markdown_result.py +29 -0
  131. llama_cloud/types/parsing_job_text_result.py +29 -0
  132. llama_cloud/types/parsing_usage.py +29 -0
  133. llama_cloud/types/pipeline.py +64 -0
  134. llama_cloud/types/pipeline_create.py +61 -0
  135. llama_cloud/types/pipeline_data_source.py +46 -0
  136. llama_cloud/types/pipeline_data_source_component.py +7 -0
  137. llama_cloud/types/pipeline_data_source_component_one.py +19 -0
  138. llama_cloud/types/pipeline_data_source_create.py +32 -0
  139. llama_cloud/types/pipeline_data_source_custom_metadata_value.py +7 -0
  140. llama_cloud/types/pipeline_deployment.py +38 -0
  141. llama_cloud/types/pipeline_file.py +52 -0
  142. llama_cloud/types/pipeline_file_create.py +36 -0
  143. llama_cloud/types/pipeline_file_create_custom_metadata_value.py +7 -0
  144. llama_cloud/types/pipeline_file_custom_metadata_value.py +7 -0
  145. llama_cloud/types/pipeline_file_resource_info_value.py +7 -0
  146. llama_cloud/types/pipeline_file_status_response.py +35 -0
  147. llama_cloud/types/pipeline_type.py +21 -0
  148. llama_cloud/types/pooling.py +29 -0
  149. llama_cloud/types/preset_retrieval_params.py +40 -0
  150. llama_cloud/types/presigned_url.py +36 -0
  151. llama_cloud/types/project.py +42 -0
  152. llama_cloud/types/project_create.py +32 -0
  153. llama_cloud/types/prompt_mixin_prompts.py +36 -0
  154. llama_cloud/types/prompt_spec.py +35 -0
  155. llama_cloud/types/pydantic_program_mode.py +41 -0
  156. llama_cloud/types/related_node_info.py +37 -0
  157. llama_cloud/types/retrieve_results.py +40 -0
  158. llama_cloud/types/sentence_splitter.py +48 -0
  159. llama_cloud/types/simple_file_node_parser.py +44 -0
  160. llama_cloud/types/status_enum.py +33 -0
  161. llama_cloud/types/supported_eval_llm_model.py +35 -0
  162. llama_cloud/types/supported_eval_llm_model_names.py +29 -0
  163. llama_cloud/types/text_node.py +62 -0
  164. llama_cloud/types/text_node_relationships_value.py +7 -0
  165. llama_cloud/types/text_node_with_score.py +36 -0
  166. llama_cloud/types/token_text_splitter.py +43 -0
  167. llama_cloud/types/transformation_category_names.py +21 -0
  168. llama_cloud/types/validation_error.py +31 -0
  169. llama_cloud/types/validation_error_loc_item.py +5 -0
  170. llama_cloud-0.0.1.dist-info/LICENSE +21 -0
  171. llama_cloud-0.0.1.dist-info/METADATA +25 -0
  172. llama_cloud-0.0.1.dist-info/RECORD +173 -0
  173. llama_cloud-0.0.1.dist-info/WHEEL +4 -0
llama_cloud/resources/files/__init__.py
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from .types import FileCreateResourceInfoValue
+
+__all__ = ["FileCreateResourceInfoValue"]
llama_cloud/resources/files/client.py
@@ -0,0 +1,560 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+import urllib.parse
+from json.decoder import JSONDecodeError
+
+from ...core.api_error import ApiError
+from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
+from ...core.jsonable_encoder import jsonable_encoder
+from ...core.remove_none_from_dict import remove_none_from_dict
+from ...errors.unprocessable_entity_error import UnprocessableEntityError
+from ...types.file import File
+from ...types.http_validation_error import HttpValidationError
+from ...types.presigned_url import PresignedUrl
+from .types.file_create_resource_info_value import FileCreateResourceInfoValue
+
+try:
+    import pydantic.v1 as pydantic  # type: ignore
+except ImportError:
+    import pydantic  # type: ignore
+
+# this is used as the default value for optional parameters
+OMIT = typing.cast(typing.Any, ...)
+
+
+class FilesClient:
+    def __init__(self, *, client_wrapper: SyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    def read_file(self, id: str, *, project_id: typing.Optional[str] = None) -> File:
+        """
+        Read File metadata objects.
+
+        Parameters:
+            - id: str.
+
+            - project_id: typing.Optional[str].
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.files.read_file(
+            id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(File, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def delete_file(self, id: str, *, project_id: typing.Optional[str] = None) -> None:
+        """
+        Delete the file from S3.
+
+        Parameters:
+            - id: str.
+
+            - project_id: typing.Optional[str].
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.files.delete_file(
+            id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "DELETE",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def read_files(self, *, project_id: typing.Optional[str] = None) -> typing.List[File]:
+        """
+        Read File metadata objects.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.files.read_files()
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[File], _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def upload_file(self, *, project_id: typing.Optional[str] = None, upload_file: typing.IO) -> File:
+        """
+        Upload a file to S3.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - upload_file: typing.IO.
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            data=jsonable_encoder({}),
+            files={"upload_file": upload_file},
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(File, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def generate_presigned_url(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        name: str,
+        file_size: typing.Optional[int] = OMIT,
+        last_modified_at: typing.Optional[dt.datetime] = OMIT,
+        resource_info: typing.Optional[typing.Dict[str, FileCreateResourceInfoValue]] = OMIT,
+        data_source_id: typing.Optional[str] = OMIT,
+    ) -> PresignedUrl:
+        """
+        Create a presigned url for uploading a file.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - name: str.
+
+            - file_size: typing.Optional[int]. Size of the file in bytes
+
+            - last_modified_at: typing.Optional[dt.datetime]. The last modified time of the file
+
+            - resource_info: typing.Optional[typing.Dict[str, FileCreateResourceInfoValue]]. Resource information for the file
+
+            - data_source_id: typing.Optional[str]. The ID of the data source that the file belongs to
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.files.generate_presigned_url(
+            name="string",
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"name": name}
+        if file_size is not OMIT:
+            _request["file_size"] = file_size
+        if last_modified_at is not OMIT:
+            _request["last_modified_at"] = last_modified_at
+        if resource_info is not OMIT:
+            _request["resource_info"] = resource_info
+        if data_source_id is not OMIT:
+            _request["data_source_id"] = data_source_id
+        _response = self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(PresignedUrl, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def sync_files(self, *, project_id: typing.Optional[str] = None) -> typing.List[File]:
+        """
+        Sync Files API against file contents uploaded via S3 presigned urls.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.files.sync_files()
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/sync"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[File], _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def read_file_content(self, id: str, *, project_id: typing.Optional[str] = None) -> PresignedUrl:
+        """
+        Returns a presigned url to read the file content.
+
+        Parameters:
+            - id: str.
+
+            - project_id: typing.Optional[str].
+        ---
+        from platform.client import PlatformApi
+
+        client = PlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        client.files.read_file_content(
+            id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/content"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(PresignedUrl, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+
+class AsyncFilesClient:
+    def __init__(self, *, client_wrapper: AsyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    async def read_file(self, id: str, *, project_id: typing.Optional[str] = None) -> File:
+        """
+        Read File metadata objects.
+
+        Parameters:
+            - id: str.
+
+            - project_id: typing.Optional[str].
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.files.read_file(
+            id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(File, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def delete_file(self, id: str, *, project_id: typing.Optional[str] = None) -> None:
+        """
+        Delete the file from S3.
+
+        Parameters:
+            - id: str.
+
+            - project_id: typing.Optional[str].
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.files.delete_file(
+            id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "DELETE",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def read_files(self, *, project_id: typing.Optional[str] = None) -> typing.List[File]:
+        """
+        Read File metadata objects.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.files.read_files()
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[File], _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def upload_file(self, *, project_id: typing.Optional[str] = None, upload_file: typing.IO) -> File:
+        """
+        Upload a file to S3.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - upload_file: typing.IO.
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            data=jsonable_encoder({}),
+            files={"upload_file": upload_file},
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(File, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def generate_presigned_url(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        name: str,
+        file_size: typing.Optional[int] = OMIT,
+        last_modified_at: typing.Optional[dt.datetime] = OMIT,
+        resource_info: typing.Optional[typing.Dict[str, FileCreateResourceInfoValue]] = OMIT,
+        data_source_id: typing.Optional[str] = OMIT,
+    ) -> PresignedUrl:
+        """
+        Create a presigned url for uploading a file.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - name: str.
+
+            - file_size: typing.Optional[int]. Size of the file in bytes
+
+            - last_modified_at: typing.Optional[dt.datetime]. The last modified time of the file
+
+            - resource_info: typing.Optional[typing.Dict[str, FileCreateResourceInfoValue]]. Resource information for the file
+
+            - data_source_id: typing.Optional[str]. The ID of the data source that the file belongs to
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.files.generate_presigned_url(
+            name="string",
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"name": name}
+        if file_size is not OMIT:
+            _request["file_size"] = file_size
+        if last_modified_at is not OMIT:
+            _request["last_modified_at"] = last_modified_at
+        if resource_info is not OMIT:
+            _request["resource_info"] = resource_info
+        if data_source_id is not OMIT:
+            _request["data_source_id"] = data_source_id
+        _response = await self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(PresignedUrl, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def sync_files(self, *, project_id: typing.Optional[str] = None) -> typing.List[File]:
+        """
+        Sync Files API against file contents uploaded via S3 presigned urls.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.files.sync_files()
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/sync"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(typing.List[File], _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def read_file_content(self, id: str, *, project_id: typing.Optional[str] = None) -> PresignedUrl:
+        """
+        Returns a presigned url to read the file content.
+
+        Parameters:
+            - id: str.
+
+            - project_id: typing.Optional[str].
+        ---
+        from platform.client import AsyncPlatformApi
+
+        client = AsyncPlatformApi(
+            token="YOUR_TOKEN",
+            base_url="https://yourhost.com/path/to/api",
+        )
+        await client.files.read_file_content(
+            id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/content"),
+            params=remove_none_from_dict({"project_id": project_id}),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(PresignedUrl, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
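
Taken together, the upload_file, generate_presigned_url, sync_files, and read_file_content methods above suggest a two-step ingestion flow: request a presigned URL, upload the bytes directly to object storage, then ask the API to reconcile its File records. Below is a minimal sketch of that flow. The import path and class name are copied from the generated docstrings; the url attribute on PresignedUrl and the example file name are assumptions, not confirmed by this diff.

    import httpx

    from platform.client import PlatformApi  # entry point as shown in the generated docstrings

    client = PlatformApi(
        token="YOUR_TOKEN",
        base_url="https://yourhost.com/path/to/api",
    )

    # 1. Ask the API for a presigned upload URL for a (hypothetical) local file.
    presigned = client.files.generate_presigned_url(
        name="report.pdf",
        file_size=1024,  # size in bytes; optional per the signature above
    )

    # 2. Upload the raw bytes straight to object storage via the presigned URL.
    #    The `url` field name on PresignedUrl is an assumption for illustration.
    with open("report.pdf", "rb") as fh:
        httpx.put(presigned.url, content=fh.read())

    # 3. Reconcile the API's File records with what was uploaded.
    files = client.files.sync_files()

    # Alternatively, small files can be sent in one call as multipart form data:
    # client.files.upload_file(upload_file=open("report.pdf", "rb"))
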
llama_cloud/resources/files/types/__init__.py
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from .file_create_resource_info_value import FileCreateResourceInfoValue
+
+__all__ = ["FileCreateResourceInfoValue"]
llama_cloud/resources/files/types/file_create_resource_info_value.py
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+FileCreateResourceInfoValue = typing.Union[typing.Dict[str, typing.Any], typing.List[typing.Any], str, int, float, bool]
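
The FileCreateResourceInfoValue alias above is a loose, JSON-like union, so the resource_info mapping accepted by generate_presigned_url can mix scalars and nested containers. A small illustrative sketch follows; the keys and values are made up for the example.

    import typing

    # Mirror of the generated alias: any JSON-like scalar or container.
    FileCreateResourceInfoValue = typing.Union[
        typing.Dict[str, typing.Any], typing.List[typing.Any], str, int, float, bool
    ]

    # Hypothetical metadata that type-checks against the alias and could be
    # passed as generate_presigned_url(resource_info=...).
    resource_info: typing.Dict[str, FileCreateResourceInfoValue] = {
        "source_system": "sharepoint",              # str
        "size_bytes": 2048,                          # int
        "tags": ["contract", "2024"],                # list
        "owner": {"id": "u_123", "role": "admin"},   # nested dict
    }
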
@@ -0,0 +1,2 @@
+# This file was auto-generated by Fern from our API Definition.
+