llama-cloud 0.1.4__py3-none-any.whl → 0.1.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of llama-cloud might be problematic; consult the package registry's advisory page for more details.

Files changed (117):
  1. llama_cloud/__init__.py +76 -10
  2. llama_cloud/client.py +3 -0
  3. llama_cloud/environment.py +1 -1
  4. llama_cloud/resources/__init__.py +23 -1
  5. llama_cloud/resources/data_sinks/client.py +26 -20
  6. llama_cloud/resources/data_sources/client.py +16 -16
  7. llama_cloud/resources/embedding_model_configs/__init__.py +23 -0
  8. llama_cloud/resources/embedding_model_configs/client.py +416 -0
  9. llama_cloud/resources/embedding_model_configs/types/__init__.py +23 -0
  10. llama_cloud/resources/embedding_model_configs/types/embedding_model_config_create_embedding_config.py +89 -0
  11. llama_cloud/resources/evals/client.py +36 -26
  12. llama_cloud/resources/extraction/client.py +32 -32
  13. llama_cloud/resources/files/__init__.py +2 -2
  14. llama_cloud/resources/files/client.py +310 -54
  15. llama_cloud/resources/files/types/__init__.py +3 -1
  16. llama_cloud/resources/files/types/file_create_from_url_resource_info_value.py +7 -0
  17. llama_cloud/resources/files/types/file_create_permission_info_value.py +7 -0
  18. llama_cloud/resources/organizations/client.py +125 -56
  19. llama_cloud/resources/parsing/client.py +652 -264
  20. llama_cloud/resources/pipelines/client.py +617 -310
  21. llama_cloud/resources/projects/client.py +341 -136
  22. llama_cloud/types/__init__.py +58 -10
  23. llama_cloud/types/azure_open_ai_embedding.py +12 -6
  24. llama_cloud/types/base_prompt_template.py +6 -2
  25. llama_cloud/types/bedrock_embedding.py +12 -6
  26. llama_cloud/types/character_splitter.py +4 -2
  27. llama_cloud/types/chat_message.py +1 -1
  28. llama_cloud/types/cloud_az_storage_blob_data_source.py +16 -7
  29. llama_cloud/types/cloud_box_data_source.py +13 -6
  30. llama_cloud/types/cloud_confluence_data_source.py +7 -6
  31. llama_cloud/types/cloud_document.py +3 -1
  32. llama_cloud/types/cloud_document_create.py +3 -1
  33. llama_cloud/types/cloud_google_drive_data_source.py +1 -0
  34. llama_cloud/types/cloud_jira_data_source.py +7 -4
  35. llama_cloud/types/cloud_notion_page_data_source.py +3 -2
  36. llama_cloud/types/cloud_one_drive_data_source.py +6 -2
  37. llama_cloud/types/cloud_postgres_vector_store.py +1 -1
  38. llama_cloud/types/cloud_s_3_data_source.py +9 -4
  39. llama_cloud/types/cloud_sharepoint_data_source.py +9 -5
  40. llama_cloud/types/cloud_slack_data_source.py +7 -6
  41. llama_cloud/types/code_splitter.py +1 -1
  42. llama_cloud/types/cohere_embedding.py +7 -3
  43. llama_cloud/types/data_sink.py +4 -4
  44. llama_cloud/types/data_sink_create.py +1 -1
  45. llama_cloud/types/data_source.py +7 -5
  46. llama_cloud/types/data_source_create.py +4 -2
  47. llama_cloud/types/embedding_model_config.py +43 -0
  48. llama_cloud/types/embedding_model_config_embedding_config.py +89 -0
  49. llama_cloud/types/embedding_model_config_update.py +35 -0
  50. llama_cloud/types/embedding_model_config_update_embedding_config.py +89 -0
  51. llama_cloud/types/eval_dataset.py +2 -2
  52. llama_cloud/types/eval_dataset_job_record.py +13 -7
  53. llama_cloud/types/eval_execution_params_override.py +6 -2
  54. llama_cloud/types/eval_question.py +2 -2
  55. llama_cloud/types/extraction_result.py +2 -2
  56. llama_cloud/types/extraction_schema.py +5 -3
  57. llama_cloud/types/file.py +15 -7
  58. llama_cloud/types/file_permission_info_value.py +5 -0
  59. llama_cloud/types/filter_operator.py +2 -2
  60. llama_cloud/types/gemini_embedding.py +10 -6
  61. llama_cloud/types/hugging_face_inference_api_embedding.py +27 -11
  62. llama_cloud/types/input_message.py +3 -1
  63. llama_cloud/types/interval_usage_and_plan.py +36 -0
  64. llama_cloud/types/job_name_mapping.py +4 -0
  65. llama_cloud/types/llama_parse_parameters.py +21 -0
  66. llama_cloud/types/llm.py +4 -2
  67. llama_cloud/types/llm_parameters.py +5 -2
  68. llama_cloud/types/local_eval.py +10 -8
  69. llama_cloud/types/local_eval_results.py +1 -1
  70. llama_cloud/types/managed_ingestion_status_response.py +5 -3
  71. llama_cloud/types/markdown_element_node_parser.py +5 -3
  72. llama_cloud/types/markdown_node_parser.py +3 -2
  73. llama_cloud/types/metadata_filter.py +2 -2
  74. llama_cloud/types/metric_result.py +3 -3
  75. llama_cloud/types/node_parser.py +1 -1
  76. llama_cloud/types/open_ai_embedding.py +12 -6
  77. llama_cloud/types/organization.py +2 -2
  78. llama_cloud/types/page_splitter_node_parser.py +2 -2
  79. llama_cloud/types/paginated_list_pipeline_files_response.py +35 -0
  80. llama_cloud/types/parsing_job_structured_result.py +32 -0
  81. llama_cloud/types/permission.py +3 -3
  82. llama_cloud/types/pipeline.py +17 -6
  83. llama_cloud/types/pipeline_configuration_hashes.py +3 -3
  84. llama_cloud/types/pipeline_create.py +15 -4
  85. llama_cloud/types/pipeline_data_source.py +13 -7
  86. llama_cloud/types/pipeline_data_source_create.py +3 -1
  87. llama_cloud/types/pipeline_deployment.py +4 -4
  88. llama_cloud/types/pipeline_file.py +25 -10
  89. llama_cloud/types/pipeline_file_create.py +3 -1
  90. llama_cloud/types/pipeline_file_permission_info_value.py +7 -0
  91. llama_cloud/types/plan.py +40 -0
  92. llama_cloud/types/playground_session.py +2 -2
  93. llama_cloud/types/preset_retrieval_params.py +14 -7
  94. llama_cloud/types/presigned_url.py +3 -1
  95. llama_cloud/types/project.py +2 -2
  96. llama_cloud/types/prompt_mixin_prompts.py +1 -1
  97. llama_cloud/types/prompt_spec.py +4 -2
  98. llama_cloud/types/role.py +3 -3
  99. llama_cloud/types/sentence_splitter.py +4 -2
  100. llama_cloud/types/text_node.py +3 -3
  101. llama_cloud/types/{hugging_face_inference_api_embedding_token.py → token.py} +1 -1
  102. llama_cloud/types/token_text_splitter.py +1 -1
  103. llama_cloud/types/usage.py +41 -0
  104. llama_cloud/types/user_organization.py +9 -5
  105. llama_cloud/types/user_organization_create.py +4 -4
  106. llama_cloud/types/user_organization_delete.py +2 -2
  107. llama_cloud/types/user_organization_role.py +2 -2
  108. llama_cloud/types/value.py +5 -0
  109. llama_cloud/types/vertex_text_embedding.py +9 -5
  110. {llama_cloud-0.1.4.dist-info → llama_cloud-0.1.6.dist-info}/METADATA +1 -1
  111. {llama_cloud-0.1.4.dist-info → llama_cloud-0.1.6.dist-info}/RECORD +113 -99
  112. llama_cloud/types/data_sink_component.py +0 -20
  113. llama_cloud/types/data_source_component.py +0 -28
  114. llama_cloud/types/metadata_filter_value.py +0 -5
  115. llama_cloud/types/pipeline_data_source_component.py +0 -28
  116. {llama_cloud-0.1.4.dist-info → llama_cloud-0.1.6.dist-info}/LICENSE +0 -0
  117. {llama_cloud-0.1.4.dist-info → llama_cloud-0.1.6.dist-info}/WHEEL +0 -0
@@ -14,6 +14,8 @@ from ...types.file import File
14
14
  from ...types.http_validation_error import HttpValidationError
15
15
  from ...types.page_screenshot_metadata import PageScreenshotMetadata
16
16
  from ...types.presigned_url import PresignedUrl
17
+ from .types.file_create_from_url_resource_info_value import FileCreateFromUrlResourceInfoValue
18
+ from .types.file_create_permission_info_value import FileCreatePermissionInfoValue
17
19
  from .types.file_create_resource_info_value import FileCreateResourceInfoValue
18
20
 
19
21
  try:
@@ -32,7 +34,9 @@ class FilesClient:
32
34
  def __init__(self, *, client_wrapper: SyncClientWrapper):
33
35
  self._client_wrapper = client_wrapper
34
36
 
35
- def get_file(self, id: str, *, project_id: typing.Optional[str] = None) -> File:
37
+ def get_file(
38
+ self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
39
+ ) -> File:
36
40
  """
37
41
  Read File metadata objects.
38
42
 
@@ -40,6 +44,8 @@ class FilesClient:
40
44
  - id: str.
41
45
 
42
46
  - project_id: typing.Optional[str].
47
+
48
+ - organization_id: typing.Optional[str].
43
49
  ---
44
50
  from llama_cloud.client import LlamaCloud
45
51
 
@@ -47,13 +53,13 @@ class FilesClient:
47
53
  token="YOUR_TOKEN",
48
54
  )
49
55
  client.files.get_file(
50
- id="string",
56
+ id="id",
51
57
  )
52
58
  """
53
59
  _response = self._client_wrapper.httpx_client.request(
54
60
  "GET",
55
61
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
56
- params=remove_none_from_dict({"project_id": project_id}),
62
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
57
63
  headers=self._client_wrapper.get_headers(),
58
64
  timeout=60,
59
65
  )
@@ -67,7 +73,9 @@ class FilesClient:
67
73
  raise ApiError(status_code=_response.status_code, body=_response.text)
68
74
  raise ApiError(status_code=_response.status_code, body=_response_json)
69
75
 
70
- def delete_file(self, id: str, *, project_id: typing.Optional[str] = None) -> None:
76
+ def delete_file(
77
+ self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
78
+ ) -> None:
71
79
  """
72
80
  Delete the file from S3.
73
81
 
@@ -75,6 +83,8 @@ class FilesClient:
75
83
  - id: str.
76
84
 
77
85
  - project_id: typing.Optional[str].
86
+
87
+ - organization_id: typing.Optional[str].
78
88
  ---
79
89
  from llama_cloud.client import LlamaCloud
80
90
 
@@ -82,13 +92,13 @@ class FilesClient:
82
92
  token="YOUR_TOKEN",
83
93
  )
84
94
  client.files.delete_file(
85
- id="string",
95
+ id="id",
86
96
  )
87
97
  """
88
98
  _response = self._client_wrapper.httpx_client.request(
89
99
  "DELETE",
90
100
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
91
- params=remove_none_from_dict({"project_id": project_id}),
101
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
92
102
  headers=self._client_wrapper.get_headers(),
93
103
  timeout=60,
94
104
  )
@@ -102,12 +112,16 @@ class FilesClient:
102
112
  raise ApiError(status_code=_response.status_code, body=_response.text)
103
113
  raise ApiError(status_code=_response.status_code, body=_response_json)
104
114
 
105
- def list_files(self, *, project_id: typing.Optional[str] = None) -> typing.List[File]:
115
+ def list_files(
116
+ self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
117
+ ) -> typing.List[File]:
106
118
  """
107
119
  Read File metadata objects.
108
120
 
109
121
  Parameters:
110
122
  - project_id: typing.Optional[str].
123
+
124
+ - organization_id: typing.Optional[str].
111
125
  ---
112
126
  from llama_cloud.client import LlamaCloud
113
127
 
@@ -119,7 +133,7 @@ class FilesClient:
119
133
  _response = self._client_wrapper.httpx_client.request(
120
134
  "GET",
121
135
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
122
- params=remove_none_from_dict({"project_id": project_id}),
136
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
123
137
  headers=self._client_wrapper.get_headers(),
124
138
  timeout=60,
125
139
  )
@@ -133,19 +147,34 @@ class FilesClient:
133
147
  raise ApiError(status_code=_response.status_code, body=_response.text)
134
148
  raise ApiError(status_code=_response.status_code, body=_response_json)
135
149
 
136
- def upload_file(self, *, project_id: typing.Optional[str] = None, upload_file: typing.IO) -> File:
150
+ def upload_file(
151
+ self,
152
+ *,
153
+ project_id: typing.Optional[str] = None,
154
+ organization_id: typing.Optional[str] = None,
155
+ upload_file: typing.IO,
156
+ ) -> File:
137
157
  """
138
158
  Upload a file to S3.
139
159
 
140
160
  Parameters:
141
161
  - project_id: typing.Optional[str].
142
162
 
163
+ - organization_id: typing.Optional[str].
164
+
143
165
  - upload_file: typing.IO.
166
+ ---
167
+ from llama_cloud.client import LlamaCloud
168
+
169
+ client = LlamaCloud(
170
+ token="YOUR_TOKEN",
171
+ )
172
+ client.files.upload_file()
144
173
  """
145
174
  _response = self._client_wrapper.httpx_client.request(
146
175
  "POST",
147
176
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
148
- params=remove_none_from_dict({"project_id": project_id}),
177
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
149
178
  data=jsonable_encoder({}),
150
179
  files={"upload_file": upload_file},
151
180
  headers=self._client_wrapper.get_headers(),
@@ -165,10 +194,12 @@ class FilesClient:
165
194
  self,
166
195
  *,
167
196
  project_id: typing.Optional[str] = None,
197
+ organization_id: typing.Optional[str] = None,
168
198
  name: str,
169
199
  file_size: typing.Optional[int] = OMIT,
170
200
  last_modified_at: typing.Optional[dt.datetime] = OMIT,
171
201
  resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateResourceInfoValue]]] = OMIT,
202
+ permission_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreatePermissionInfoValue]]] = OMIT,
172
203
  data_source_id: typing.Optional[str] = OMIT,
173
204
  ) -> PresignedUrl:
174
205
  """
@@ -177,15 +208,19 @@ class FilesClient:
177
208
  Parameters:
178
209
  - project_id: typing.Optional[str].
179
210
 
211
+ - organization_id: typing.Optional[str].
212
+
180
213
  - name: str.
181
214
 
182
- - file_size: typing.Optional[int].
215
+ - file_size: typing.Optional[int]. Size of the file in bytes
183
216
 
184
- - last_modified_at: typing.Optional[dt.datetime].
217
+ - last_modified_at: typing.Optional[dt.datetime]. The last modified time of the file
185
218
 
186
- - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateResourceInfoValue]]].
219
+ - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateResourceInfoValue]]]. Resource information for the file
187
220
 
188
- - data_source_id: typing.Optional[str].
221
+ - permission_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreatePermissionInfoValue]]]. Permission information for the file
222
+
223
+ - data_source_id: typing.Optional[str]. The ID of the data source that the file belongs to
189
224
  ---
190
225
  from llama_cloud.client import LlamaCloud
191
226
 
@@ -193,7 +228,7 @@ class FilesClient:
193
228
  token="YOUR_TOKEN",
194
229
  )
195
230
  client.files.generate_presigned_url(
196
- name="string",
231
+ name="name",
197
232
  )
198
233
  """
199
234
  _request: typing.Dict[str, typing.Any] = {"name": name}
@@ -203,12 +238,14 @@ class FilesClient:
203
238
  _request["last_modified_at"] = last_modified_at
204
239
  if resource_info is not OMIT:
205
240
  _request["resource_info"] = resource_info
241
+ if permission_info is not OMIT:
242
+ _request["permission_info"] = permission_info
206
243
  if data_source_id is not OMIT:
207
244
  _request["data_source_id"] = data_source_id
208
245
  _response = self._client_wrapper.httpx_client.request(
209
246
  "PUT",
210
247
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
211
- params=remove_none_from_dict({"project_id": project_id}),
248
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
212
249
  json=jsonable_encoder(_request),
213
250
  headers=self._client_wrapper.get_headers(),
214
251
  timeout=60,
@@ -223,12 +260,16 @@ class FilesClient:
223
260
  raise ApiError(status_code=_response.status_code, body=_response.text)
224
261
  raise ApiError(status_code=_response.status_code, body=_response_json)
225
262
 
226
- def sync_files(self, *, project_id: typing.Optional[str] = None) -> typing.List[File]:
263
+ def sync_files(
264
+ self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
265
+ ) -> typing.List[File]:
227
266
  """
228
267
  Sync Files API against file contents uploaded via S3 presigned urls.
229
268
 
230
269
  Parameters:
231
270
  - project_id: typing.Optional[str].
271
+
272
+ - organization_id: typing.Optional[str].
232
273
  ---
233
274
  from llama_cloud.client import LlamaCloud
234
275
 
@@ -240,7 +281,7 @@ class FilesClient:
240
281
  _response = self._client_wrapper.httpx_client.request(
241
282
  "PUT",
242
283
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/sync"),
243
- params=remove_none_from_dict({"project_id": project_id}),
284
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
244
285
  headers=self._client_wrapper.get_headers(),
245
286
  timeout=60,
246
287
  )
@@ -254,7 +295,84 @@ class FilesClient:
254
295
  raise ApiError(status_code=_response.status_code, body=_response.text)
255
296
  raise ApiError(status_code=_response.status_code, body=_response_json)
256
297
 
257
- def read_file_content(self, id: str, *, project_id: typing.Optional[str] = None) -> PresignedUrl:
298
+ def upload_file_from_url(
299
+ self,
300
+ *,
301
+ project_id: typing.Optional[str] = None,
302
+ organization_id: typing.Optional[str] = None,
303
+ name: typing.Optional[str] = OMIT,
304
+ url: str,
305
+ proxy_url: typing.Optional[str] = OMIT,
306
+ request_headers: typing.Optional[typing.Dict[str, typing.Optional[str]]] = OMIT,
307
+ verify_ssl: typing.Optional[bool] = OMIT,
308
+ follow_redirects: typing.Optional[bool] = OMIT,
309
+ resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]] = OMIT,
310
+ ) -> File:
311
+ """
312
+ Upload a file to S3 from a URL.
313
+
314
+ Parameters:
315
+ - project_id: typing.Optional[str].
316
+
317
+ - organization_id: typing.Optional[str].
318
+
319
+ - name: typing.Optional[str].
320
+
321
+ - url: str. URL of the file to download
322
+
323
+ - proxy_url: typing.Optional[str]. URL of the proxy server to use for downloading the file
324
+
325
+ - request_headers: typing.Optional[typing.Dict[str, typing.Optional[str]]]. Headers to include in the request when downloading the file
326
+
327
+ - verify_ssl: typing.Optional[bool]. Whether to verify the SSL certificate when downloading the file
328
+
329
+ - follow_redirects: typing.Optional[bool]. Whether to follow redirects when downloading the file
330
+
331
+ - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]]. Resource information for the file
332
+ ---
333
+ from llama_cloud.client import LlamaCloud
334
+
335
+ client = LlamaCloud(
336
+ token="YOUR_TOKEN",
337
+ )
338
+ client.files.upload_file_from_url(
339
+ url="url",
340
+ )
341
+ """
342
+ _request: typing.Dict[str, typing.Any] = {"url": url}
343
+ if name is not OMIT:
344
+ _request["name"] = name
345
+ if proxy_url is not OMIT:
346
+ _request["proxy_url"] = proxy_url
347
+ if request_headers is not OMIT:
348
+ _request["request_headers"] = request_headers
349
+ if verify_ssl is not OMIT:
350
+ _request["verify_ssl"] = verify_ssl
351
+ if follow_redirects is not OMIT:
352
+ _request["follow_redirects"] = follow_redirects
353
+ if resource_info is not OMIT:
354
+ _request["resource_info"] = resource_info
355
+ _response = self._client_wrapper.httpx_client.request(
356
+ "PUT",
357
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/upload_from_url"),
358
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
359
+ json=jsonable_encoder(_request),
360
+ headers=self._client_wrapper.get_headers(),
361
+ timeout=60,
362
+ )
363
+ if 200 <= _response.status_code < 300:
364
+ return pydantic.parse_obj_as(File, _response.json()) # type: ignore
365
+ if _response.status_code == 422:
366
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
367
+ try:
368
+ _response_json = _response.json()
369
+ except JSONDecodeError:
370
+ raise ApiError(status_code=_response.status_code, body=_response.text)
371
+ raise ApiError(status_code=_response.status_code, body=_response_json)
372
+
373
+ def read_file_content(
374
+ self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
375
+ ) -> PresignedUrl:
258
376
  """
259
377
  Returns a presigned url to read the file content.
260
378
 
@@ -262,6 +380,8 @@ class FilesClient:
262
380
  - id: str.
263
381
 
264
382
  - project_id: typing.Optional[str].
383
+
384
+ - organization_id: typing.Optional[str].
265
385
  ---
266
386
  from llama_cloud.client import LlamaCloud
267
387
 
@@ -269,13 +389,13 @@ class FilesClient:
269
389
  token="YOUR_TOKEN",
270
390
  )
271
391
  client.files.read_file_content(
272
- id="string",
392
+ id="id",
273
393
  )
274
394
  """
275
395
  _response = self._client_wrapper.httpx_client.request(
276
396
  "GET",
277
397
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/content"),
278
- params=remove_none_from_dict({"project_id": project_id}),
398
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
279
399
  headers=self._client_wrapper.get_headers(),
280
400
  timeout=60,
281
401
  )
@@ -290,7 +410,7 @@ class FilesClient:
290
410
  raise ApiError(status_code=_response.status_code, body=_response_json)
291
411
 
292
412
  def list_file_page_screenshots(
293
- self, id: str, *, project_id: typing.Optional[str] = None
413
+ self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
294
414
  ) -> typing.List[PageScreenshotMetadata]:
295
415
  """
296
416
  List metadata for all screenshots of pages from a file.
@@ -299,6 +419,8 @@ class FilesClient:
299
419
  - id: str.
300
420
 
301
421
  - project_id: typing.Optional[str].
422
+
423
+ - organization_id: typing.Optional[str].
302
424
  ---
303
425
  from llama_cloud.client import LlamaCloud
304
426
 
@@ -306,13 +428,13 @@ class FilesClient:
306
428
  token="YOUR_TOKEN",
307
429
  )
308
430
  client.files.list_file_page_screenshots(
309
- id="string",
431
+ id="id",
310
432
  )
311
433
  """
312
434
  _response = self._client_wrapper.httpx_client.request(
313
435
  "GET",
314
436
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots"),
315
- params=remove_none_from_dict({"project_id": project_id}),
437
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
316
438
  headers=self._client_wrapper.get_headers(),
317
439
  timeout=60,
318
440
  )
@@ -327,7 +449,12 @@ class FilesClient:
327
449
  raise ApiError(status_code=_response.status_code, body=_response_json)
328
450
 
329
451
  def get_file_page_screenshot(
330
- self, id: str, page_index: int, *, project_id: typing.Optional[str] = None
452
+ self,
453
+ id: str,
454
+ page_index: int,
455
+ *,
456
+ project_id: typing.Optional[str] = None,
457
+ organization_id: typing.Optional[str] = None,
331
458
  ) -> typing.Any:
332
459
  """
333
460
  Get screenshot of a page from a file.
@@ -338,6 +465,8 @@ class FilesClient:
338
465
  - page_index: int.
339
466
 
340
467
  - project_id: typing.Optional[str].
468
+
469
+ - organization_id: typing.Optional[str].
341
470
  ---
342
471
  from llama_cloud.client import LlamaCloud
343
472
 
@@ -345,7 +474,7 @@ class FilesClient:
345
474
  token="YOUR_TOKEN",
346
475
  )
347
476
  client.files.get_file_page_screenshot(
348
- id="string",
477
+ id="id",
349
478
  page_index=1,
350
479
  )
351
480
  """
@@ -354,7 +483,7 @@ class FilesClient:
354
483
  urllib.parse.urljoin(
355
484
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots/{page_index}"
356
485
  ),
357
- params=remove_none_from_dict({"project_id": project_id}),
486
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
358
487
  headers=self._client_wrapper.get_headers(),
359
488
  timeout=60,
360
489
  )
@@ -373,7 +502,9 @@ class AsyncFilesClient:
373
502
  def __init__(self, *, client_wrapper: AsyncClientWrapper):
374
503
  self._client_wrapper = client_wrapper
375
504
 
376
- async def get_file(self, id: str, *, project_id: typing.Optional[str] = None) -> File:
505
+ async def get_file(
506
+ self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
507
+ ) -> File:
377
508
  """
378
509
  Read File metadata objects.
379
510
 
@@ -381,6 +512,8 @@ class AsyncFilesClient:
381
512
  - id: str.
382
513
 
383
514
  - project_id: typing.Optional[str].
515
+
516
+ - organization_id: typing.Optional[str].
384
517
  ---
385
518
  from llama_cloud.client import AsyncLlamaCloud
386
519
 
@@ -388,13 +521,13 @@ class AsyncFilesClient:
388
521
  token="YOUR_TOKEN",
389
522
  )
390
523
  await client.files.get_file(
391
- id="string",
524
+ id="id",
392
525
  )
393
526
  """
394
527
  _response = await self._client_wrapper.httpx_client.request(
395
528
  "GET",
396
529
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
397
- params=remove_none_from_dict({"project_id": project_id}),
530
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
398
531
  headers=self._client_wrapper.get_headers(),
399
532
  timeout=60,
400
533
  )
@@ -408,7 +541,9 @@ class AsyncFilesClient:
408
541
  raise ApiError(status_code=_response.status_code, body=_response.text)
409
542
  raise ApiError(status_code=_response.status_code, body=_response_json)
410
543
 
411
- async def delete_file(self, id: str, *, project_id: typing.Optional[str] = None) -> None:
544
+ async def delete_file(
545
+ self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
546
+ ) -> None:
412
547
  """
413
548
  Delete the file from S3.
414
549
 
@@ -416,6 +551,8 @@ class AsyncFilesClient:
416
551
  - id: str.
417
552
 
418
553
  - project_id: typing.Optional[str].
554
+
555
+ - organization_id: typing.Optional[str].
419
556
  ---
420
557
  from llama_cloud.client import AsyncLlamaCloud
421
558
 
@@ -423,13 +560,13 @@ class AsyncFilesClient:
423
560
  token="YOUR_TOKEN",
424
561
  )
425
562
  await client.files.delete_file(
426
- id="string",
563
+ id="id",
427
564
  )
428
565
  """
429
566
  _response = await self._client_wrapper.httpx_client.request(
430
567
  "DELETE",
431
568
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
432
- params=remove_none_from_dict({"project_id": project_id}),
569
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
433
570
  headers=self._client_wrapper.get_headers(),
434
571
  timeout=60,
435
572
  )
@@ -443,12 +580,16 @@ class AsyncFilesClient:
443
580
  raise ApiError(status_code=_response.status_code, body=_response.text)
444
581
  raise ApiError(status_code=_response.status_code, body=_response_json)
445
582
 
446
- async def list_files(self, *, project_id: typing.Optional[str] = None) -> typing.List[File]:
583
+ async def list_files(
584
+ self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
585
+ ) -> typing.List[File]:
447
586
  """
448
587
  Read File metadata objects.
449
588
 
450
589
  Parameters:
451
590
  - project_id: typing.Optional[str].
591
+
592
+ - organization_id: typing.Optional[str].
452
593
  ---
453
594
  from llama_cloud.client import AsyncLlamaCloud
454
595
 
@@ -460,7 +601,7 @@ class AsyncFilesClient:
460
601
  _response = await self._client_wrapper.httpx_client.request(
461
602
  "GET",
462
603
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
463
- params=remove_none_from_dict({"project_id": project_id}),
604
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
464
605
  headers=self._client_wrapper.get_headers(),
465
606
  timeout=60,
466
607
  )
@@ -474,19 +615,34 @@ class AsyncFilesClient:
474
615
  raise ApiError(status_code=_response.status_code, body=_response.text)
475
616
  raise ApiError(status_code=_response.status_code, body=_response_json)
476
617
 
477
- async def upload_file(self, *, project_id: typing.Optional[str] = None, upload_file: typing.IO) -> File:
618
+ async def upload_file(
619
+ self,
620
+ *,
621
+ project_id: typing.Optional[str] = None,
622
+ organization_id: typing.Optional[str] = None,
623
+ upload_file: typing.IO,
624
+ ) -> File:
478
625
  """
479
626
  Upload a file to S3.
480
627
 
481
628
  Parameters:
482
629
  - project_id: typing.Optional[str].
483
630
 
631
+ - organization_id: typing.Optional[str].
632
+
484
633
  - upload_file: typing.IO.
634
+ ---
635
+ from llama_cloud.client import AsyncLlamaCloud
636
+
637
+ client = AsyncLlamaCloud(
638
+ token="YOUR_TOKEN",
639
+ )
640
+ await client.files.upload_file()
485
641
  """
486
642
  _response = await self._client_wrapper.httpx_client.request(
487
643
  "POST",
488
644
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
489
- params=remove_none_from_dict({"project_id": project_id}),
645
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
490
646
  data=jsonable_encoder({}),
491
647
  files={"upload_file": upload_file},
492
648
  headers=self._client_wrapper.get_headers(),
@@ -506,10 +662,12 @@ class AsyncFilesClient:
506
662
  self,
507
663
  *,
508
664
  project_id: typing.Optional[str] = None,
665
+ organization_id: typing.Optional[str] = None,
509
666
  name: str,
510
667
  file_size: typing.Optional[int] = OMIT,
511
668
  last_modified_at: typing.Optional[dt.datetime] = OMIT,
512
669
  resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateResourceInfoValue]]] = OMIT,
670
+ permission_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreatePermissionInfoValue]]] = OMIT,
513
671
  data_source_id: typing.Optional[str] = OMIT,
514
672
  ) -> PresignedUrl:
515
673
  """
@@ -518,15 +676,19 @@ class AsyncFilesClient:
518
676
  Parameters:
519
677
  - project_id: typing.Optional[str].
520
678
 
679
+ - organization_id: typing.Optional[str].
680
+
521
681
  - name: str.
522
682
 
523
- - file_size: typing.Optional[int].
683
+ - file_size: typing.Optional[int]. Size of the file in bytes
524
684
 
525
- - last_modified_at: typing.Optional[dt.datetime].
685
+ - last_modified_at: typing.Optional[dt.datetime]. The last modified time of the file
526
686
 
527
- - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateResourceInfoValue]]].
687
+ - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateResourceInfoValue]]]. Resource information for the file
528
688
 
529
- - data_source_id: typing.Optional[str].
689
+ - permission_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreatePermissionInfoValue]]]. Permission information for the file
690
+
691
+ - data_source_id: typing.Optional[str]. The ID of the data source that the file belongs to
530
692
  ---
531
693
  from llama_cloud.client import AsyncLlamaCloud
532
694
 
@@ -534,7 +696,7 @@ class AsyncFilesClient:
534
696
  token="YOUR_TOKEN",
535
697
  )
536
698
  await client.files.generate_presigned_url(
537
- name="string",
699
+ name="name",
538
700
  )
539
701
  """
540
702
  _request: typing.Dict[str, typing.Any] = {"name": name}
@@ -544,12 +706,14 @@ class AsyncFilesClient:
544
706
  _request["last_modified_at"] = last_modified_at
545
707
  if resource_info is not OMIT:
546
708
  _request["resource_info"] = resource_info
709
+ if permission_info is not OMIT:
710
+ _request["permission_info"] = permission_info
547
711
  if data_source_id is not OMIT:
548
712
  _request["data_source_id"] = data_source_id
549
713
  _response = await self._client_wrapper.httpx_client.request(
550
714
  "PUT",
551
715
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
552
- params=remove_none_from_dict({"project_id": project_id}),
716
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
553
717
  json=jsonable_encoder(_request),
554
718
  headers=self._client_wrapper.get_headers(),
555
719
  timeout=60,
@@ -564,12 +728,16 @@ class AsyncFilesClient:
564
728
  raise ApiError(status_code=_response.status_code, body=_response.text)
565
729
  raise ApiError(status_code=_response.status_code, body=_response_json)
566
730
 
567
- async def sync_files(self, *, project_id: typing.Optional[str] = None) -> typing.List[File]:
731
+ async def sync_files(
732
+ self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
733
+ ) -> typing.List[File]:
568
734
  """
569
735
  Sync Files API against file contents uploaded via S3 presigned urls.
570
736
 
571
737
  Parameters:
572
738
  - project_id: typing.Optional[str].
739
+
740
+ - organization_id: typing.Optional[str].
573
741
  ---
574
742
  from llama_cloud.client import AsyncLlamaCloud
575
743
 
@@ -581,7 +749,7 @@ class AsyncFilesClient:
581
749
  _response = await self._client_wrapper.httpx_client.request(
582
750
  "PUT",
583
751
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/sync"),
584
- params=remove_none_from_dict({"project_id": project_id}),
752
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
585
753
  headers=self._client_wrapper.get_headers(),
586
754
  timeout=60,
587
755
  )
@@ -595,7 +763,84 @@ class AsyncFilesClient:
595
763
  raise ApiError(status_code=_response.status_code, body=_response.text)
596
764
  raise ApiError(status_code=_response.status_code, body=_response_json)
597
765
 
598
- async def read_file_content(self, id: str, *, project_id: typing.Optional[str] = None) -> PresignedUrl:
766
+ async def upload_file_from_url(
767
+ self,
768
+ *,
769
+ project_id: typing.Optional[str] = None,
770
+ organization_id: typing.Optional[str] = None,
771
+ name: typing.Optional[str] = OMIT,
772
+ url: str,
773
+ proxy_url: typing.Optional[str] = OMIT,
774
+ request_headers: typing.Optional[typing.Dict[str, typing.Optional[str]]] = OMIT,
775
+ verify_ssl: typing.Optional[bool] = OMIT,
776
+ follow_redirects: typing.Optional[bool] = OMIT,
777
+ resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]] = OMIT,
778
+ ) -> File:
779
+ """
780
+ Upload a file to S3 from a URL.
781
+
782
+ Parameters:
783
+ - project_id: typing.Optional[str].
784
+
785
+ - organization_id: typing.Optional[str].
786
+
787
+ - name: typing.Optional[str].
788
+
789
+ - url: str. URL of the file to download
790
+
791
+ - proxy_url: typing.Optional[str]. URL of the proxy server to use for downloading the file
792
+
793
+ - request_headers: typing.Optional[typing.Dict[str, typing.Optional[str]]]. Headers to include in the request when downloading the file
794
+
795
+ - verify_ssl: typing.Optional[bool]. Whether to verify the SSL certificate when downloading the file
796
+
797
+ - follow_redirects: typing.Optional[bool]. Whether to follow redirects when downloading the file
798
+
799
+ - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]]. Resource information for the file
800
+ ---
801
+ from llama_cloud.client import AsyncLlamaCloud
802
+
803
+ client = AsyncLlamaCloud(
804
+ token="YOUR_TOKEN",
805
+ )
806
+ await client.files.upload_file_from_url(
807
+ url="url",
808
+ )
809
+ """
810
+ _request: typing.Dict[str, typing.Any] = {"url": url}
811
+ if name is not OMIT:
812
+ _request["name"] = name
813
+ if proxy_url is not OMIT:
814
+ _request["proxy_url"] = proxy_url
815
+ if request_headers is not OMIT:
816
+ _request["request_headers"] = request_headers
817
+ if verify_ssl is not OMIT:
818
+ _request["verify_ssl"] = verify_ssl
819
+ if follow_redirects is not OMIT:
820
+ _request["follow_redirects"] = follow_redirects
821
+ if resource_info is not OMIT:
822
+ _request["resource_info"] = resource_info
823
+ _response = await self._client_wrapper.httpx_client.request(
824
+ "PUT",
825
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/upload_from_url"),
826
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
827
+ json=jsonable_encoder(_request),
828
+ headers=self._client_wrapper.get_headers(),
829
+ timeout=60,
830
+ )
831
+ if 200 <= _response.status_code < 300:
832
+ return pydantic.parse_obj_as(File, _response.json()) # type: ignore
833
+ if _response.status_code == 422:
834
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
835
+ try:
836
+ _response_json = _response.json()
837
+ except JSONDecodeError:
838
+ raise ApiError(status_code=_response.status_code, body=_response.text)
839
+ raise ApiError(status_code=_response.status_code, body=_response_json)
840
+
841
+ async def read_file_content(
842
+ self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
843
+ ) -> PresignedUrl:
599
844
  """
600
845
  Returns a presigned url to read the file content.
601
846
 
@@ -603,6 +848,8 @@ class AsyncFilesClient:
603
848
  - id: str.
604
849
 
605
850
  - project_id: typing.Optional[str].
851
+
852
+ - organization_id: typing.Optional[str].
606
853
  ---
607
854
  from llama_cloud.client import AsyncLlamaCloud
608
855
 
@@ -610,13 +857,13 @@ class AsyncFilesClient:
610
857
  token="YOUR_TOKEN",
611
858
  )
612
859
  await client.files.read_file_content(
613
- id="string",
860
+ id="id",
614
861
  )
615
862
  """
616
863
  _response = await self._client_wrapper.httpx_client.request(
617
864
  "GET",
618
865
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/content"),
619
- params=remove_none_from_dict({"project_id": project_id}),
866
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
620
867
  headers=self._client_wrapper.get_headers(),
621
868
  timeout=60,
622
869
  )
@@ -631,7 +878,7 @@ class AsyncFilesClient:
631
878
  raise ApiError(status_code=_response.status_code, body=_response_json)
632
879
 
633
880
  async def list_file_page_screenshots(
634
- self, id: str, *, project_id: typing.Optional[str] = None
881
+ self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
635
882
  ) -> typing.List[PageScreenshotMetadata]:
636
883
  """
637
884
  List metadata for all screenshots of pages from a file.
@@ -640,6 +887,8 @@ class AsyncFilesClient:
640
887
  - id: str.
641
888
 
642
889
  - project_id: typing.Optional[str].
890
+
891
+ - organization_id: typing.Optional[str].
643
892
  ---
644
893
  from llama_cloud.client import AsyncLlamaCloud
645
894
 
@@ -647,13 +896,13 @@ class AsyncFilesClient:
647
896
  token="YOUR_TOKEN",
648
897
  )
649
898
  await client.files.list_file_page_screenshots(
650
- id="string",
899
+ id="id",
651
900
  )
652
901
  """
653
902
  _response = await self._client_wrapper.httpx_client.request(
654
903
  "GET",
655
904
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots"),
656
- params=remove_none_from_dict({"project_id": project_id}),
905
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
657
906
  headers=self._client_wrapper.get_headers(),
658
907
  timeout=60,
659
908
  )
@@ -668,7 +917,12 @@ class AsyncFilesClient:
668
917
  raise ApiError(status_code=_response.status_code, body=_response_json)
669
918
 
670
919
  async def get_file_page_screenshot(
671
- self, id: str, page_index: int, *, project_id: typing.Optional[str] = None
920
+ self,
921
+ id: str,
922
+ page_index: int,
923
+ *,
924
+ project_id: typing.Optional[str] = None,
925
+ organization_id: typing.Optional[str] = None,
672
926
  ) -> typing.Any:
673
927
  """
674
928
  Get screenshot of a page from a file.
@@ -679,6 +933,8 @@ class AsyncFilesClient:
679
933
  - page_index: int.
680
934
 
681
935
  - project_id: typing.Optional[str].
936
+
937
+ - organization_id: typing.Optional[str].
682
938
  ---
683
939
  from llama_cloud.client import AsyncLlamaCloud
684
940
 
@@ -686,7 +942,7 @@ class AsyncFilesClient:
686
942
  token="YOUR_TOKEN",
687
943
  )
688
944
  await client.files.get_file_page_screenshot(
689
- id="string",
945
+ id="id",
690
946
  page_index=1,
691
947
  )
692
948
  """
@@ -695,7 +951,7 @@ class AsyncFilesClient:
695
951
  urllib.parse.urljoin(
696
952
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots/{page_index}"
697
953
  ),
698
- params=remove_none_from_dict({"project_id": project_id}),
954
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
699
955
  headers=self._client_wrapper.get_headers(),
700
956
  timeout=60,
701
957
  )