llama-cloud 0.1.4__py3-none-any.whl → 0.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of llama-cloud might be problematic.

Files changed (37)
  1. llama_cloud/__init__.py +64 -0
  2. llama_cloud/client.py +3 -0
  3. llama_cloud/resources/__init__.py +22 -1
  4. llama_cloud/resources/data_sinks/client.py +12 -6
  5. llama_cloud/resources/embedding_model_configs/__init__.py +23 -0
  6. llama_cloud/resources/embedding_model_configs/client.py +360 -0
  7. llama_cloud/resources/embedding_model_configs/types/__init__.py +23 -0
  8. llama_cloud/resources/embedding_model_configs/types/embedding_model_config_create_embedding_config.py +89 -0
  9. llama_cloud/resources/files/__init__.py +2 -2
  10. llama_cloud/resources/files/client.py +265 -34
  11. llama_cloud/resources/files/types/__init__.py +2 -1
  12. llama_cloud/resources/files/types/file_create_from_url_resource_info_value.py +7 -0
  13. llama_cloud/resources/organizations/client.py +65 -0
  14. llama_cloud/resources/parsing/client.py +157 -0
  15. llama_cloud/resources/pipelines/client.py +177 -14
  16. llama_cloud/resources/projects/client.py +71 -0
  17. llama_cloud/types/__init__.py +48 -0
  18. llama_cloud/types/cloud_one_drive_data_source.py +1 -0
  19. llama_cloud/types/cloud_postgres_vector_store.py +1 -1
  20. llama_cloud/types/cloud_sharepoint_data_source.py +1 -0
  21. llama_cloud/types/embedding_model_config.py +43 -0
  22. llama_cloud/types/embedding_model_config_embedding_config.py +89 -0
  23. llama_cloud/types/embedding_model_config_update.py +33 -0
  24. llama_cloud/types/embedding_model_config_update_embedding_config.py +89 -0
  25. llama_cloud/types/interval_usage_and_plan.py +36 -0
  26. llama_cloud/types/llama_parse_parameters.py +10 -0
  27. llama_cloud/types/markdown_node_parser.py +2 -1
  28. llama_cloud/types/paginated_list_pipeline_files_response.py +35 -0
  29. llama_cloud/types/pipeline.py +1 -0
  30. llama_cloud/types/pipeline_create.py +1 -0
  31. llama_cloud/types/pipeline_file.py +1 -0
  32. llama_cloud/types/plan.py +40 -0
  33. llama_cloud/types/usage.py +41 -0
  34. {llama_cloud-0.1.4.dist-info → llama_cloud-0.1.5.dist-info}/METADATA +1 -2
  35. {llama_cloud-0.1.4.dist-info → llama_cloud-0.1.5.dist-info}/RECORD +37 -24
  36. {llama_cloud-0.1.4.dist-info → llama_cloud-0.1.5.dist-info}/WHEEL +1 -1
  37. {llama_cloud-0.1.4.dist-info → llama_cloud-0.1.5.dist-info}/LICENSE +0 -0
llama_cloud/resources/files/client.py

@@ -14,6 +14,7 @@ from ...types.file import File
 from ...types.http_validation_error import HttpValidationError
 from ...types.page_screenshot_metadata import PageScreenshotMetadata
 from ...types.presigned_url import PresignedUrl
+from .types.file_create_from_url_resource_info_value import FileCreateFromUrlResourceInfoValue
 from .types.file_create_resource_info_value import FileCreateResourceInfoValue

 try:
@@ -32,7 +33,9 @@ class FilesClient:
     def __init__(self, *, client_wrapper: SyncClientWrapper):
         self._client_wrapper = client_wrapper

-    def get_file(self, id: str, *, project_id: typing.Optional[str] = None) -> File:
+    def get_file(
+        self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+    ) -> File:
         """
         Read File metadata objects.

@@ -40,6 +43,8 @@ class FilesClient:
             - id: str.

             - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

@@ -53,7 +58,7 @@ class FilesClient:
         _response = self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -67,7 +72,9 @@ class FilesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def delete_file(self, id: str, *, project_id: typing.Optional[str] = None) -> None:
+    def delete_file(
+        self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+    ) -> None:
         """
         Delete the file from S3.

@@ -75,6 +82,8 @@ class FilesClient:
             - id: str.

             - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

@@ -88,7 +97,7 @@ class FilesClient:
         _response = self._client_wrapper.httpx_client.request(
             "DELETE",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -102,12 +111,16 @@ class FilesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def list_files(self, *, project_id: typing.Optional[str] = None) -> typing.List[File]:
+    def list_files(
+        self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+    ) -> typing.List[File]:
         """
         Read File metadata objects.

         Parameters:
             - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

@@ -119,7 +132,7 @@ class FilesClient:
         _response = self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -133,19 +146,27 @@ class FilesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def upload_file(self, *, project_id: typing.Optional[str] = None, upload_file: typing.IO) -> File:
+    def upload_file(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        upload_file: typing.IO,
+    ) -> File:
         """
         Upload a file to S3.

         Parameters:
             - project_id: typing.Optional[str].

+            - organization_id: typing.Optional[str].
+
             - upload_file: typing.IO.
         """
         _response = self._client_wrapper.httpx_client.request(
             "POST",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             data=jsonable_encoder({}),
             files={"upload_file": upload_file},
             headers=self._client_wrapper.get_headers(),
@@ -165,6 +186,7 @@ class FilesClient:
         self,
         *,
         project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
         name: str,
         file_size: typing.Optional[int] = OMIT,
         last_modified_at: typing.Optional[dt.datetime] = OMIT,
@@ -177,6 +199,8 @@ class FilesClient:
         Parameters:
             - project_id: typing.Optional[str].

+            - organization_id: typing.Optional[str].
+
             - name: str.

             - file_size: typing.Optional[int].
@@ -208,7 +232,7 @@ class FilesClient:
         _response = self._client_wrapper.httpx_client.request(
             "PUT",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(_request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -223,12 +247,16 @@ class FilesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def sync_files(self, *, project_id: typing.Optional[str] = None) -> typing.List[File]:
+    def sync_files(
+        self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+    ) -> typing.List[File]:
         """
         Sync Files API against file contents uploaded via S3 presigned urls.

         Parameters:
             - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

@@ -240,7 +268,7 @@ class FilesClient:
         _response = self._client_wrapper.httpx_client.request(
             "PUT",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/sync"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -254,7 +282,84 @@ class FilesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def read_file_content(self, id: str, *, project_id: typing.Optional[str] = None) -> PresignedUrl:
+    def upload_file_from_url(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        name: typing.Optional[str] = OMIT,
+        url: str,
+        proxy_url: typing.Optional[str] = OMIT,
+        request_headers: typing.Optional[typing.Dict[str, typing.Optional[str]]] = OMIT,
+        verify_ssl: typing.Optional[bool] = OMIT,
+        follow_redirects: typing.Optional[bool] = OMIT,
+        resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]] = OMIT,
+    ) -> File:
+        """
+        Upload a file to S3 from a URL.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
+
+            - name: typing.Optional[str].
+
+            - url: str. URL of the file to download
+
+            - proxy_url: typing.Optional[str].
+
+            - request_headers: typing.Optional[typing.Dict[str, typing.Optional[str]]].
+
+            - verify_ssl: typing.Optional[bool]. Whether to verify the SSL certificate when downloading the file
+
+            - follow_redirects: typing.Optional[bool]. Whether to follow redirects when downloading the file
+
+            - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]].
+        ---
+        from llama_cloud.client import LlamaCloud
+
+        client = LlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        client.files.upload_file_from_url(
+            url="string",
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"url": url}
+        if name is not OMIT:
+            _request["name"] = name
+        if proxy_url is not OMIT:
+            _request["proxy_url"] = proxy_url
+        if request_headers is not OMIT:
+            _request["request_headers"] = request_headers
+        if verify_ssl is not OMIT:
+            _request["verify_ssl"] = verify_ssl
+        if follow_redirects is not OMIT:
+            _request["follow_redirects"] = follow_redirects
+        if resource_info is not OMIT:
+            _request["resource_info"] = resource_info
+        _response = self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/upload_from_url"),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(File, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def read_file_content(
+        self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+    ) -> PresignedUrl:
         """
         Returns a presigned url to read the file content.

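The new upload_file_from_url method above calls PUT api/v1/files/upload_from_url and packs the optional download settings into the JSON body. A minimal sketch of how the sync client might be called once 0.1.5 is installed; the token, IDs, and URL below are placeholders, not values taken from this diff:

    from llama_cloud.client import LlamaCloud

    client = LlamaCloud(token="YOUR_TOKEN")  # placeholder credentials

    # Ask LlamaCloud to fetch a remote document itself instead of uploading bytes.
    file = client.files.upload_file_from_url(
        project_id="PROJECT_ID",               # placeholder
        organization_id="ORGANIZATION_ID",     # placeholder
        url="https://example.com/report.pdf",  # placeholder URL
        request_headers={"Authorization": "Bearer SOURCE_TOKEN"},  # sent when fetching the URL
        verify_ssl=True,
        follow_redirects=True,
    )
    print(file.id)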
@@ -262,6 +367,8 @@ class FilesClient:
             - id: str.

             - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

@@ -275,7 +382,7 @@ class FilesClient:
         _response = self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/content"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -290,7 +397,7 @@ class FilesClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)

     def list_file_page_screenshots(
-        self, id: str, *, project_id: typing.Optional[str] = None
+        self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
     ) -> typing.List[PageScreenshotMetadata]:
         """
         List metadata for all screenshots of pages from a file.
@@ -299,6 +406,8 @@ class FilesClient:
             - id: str.

             - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

@@ -312,7 +421,7 @@ class FilesClient:
         _response = self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -327,7 +436,12 @@ class FilesClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)

     def get_file_page_screenshot(
-        self, id: str, page_index: int, *, project_id: typing.Optional[str] = None
+        self,
+        id: str,
+        page_index: int,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
     ) -> typing.Any:
         """
         Get screenshot of a page from a file.
@@ -338,6 +452,8 @@ class FilesClient:
             - page_index: int.

             - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

@@ -354,7 +470,7 @@ class FilesClient:
             urllib.parse.urljoin(
                 f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots/{page_index}"
             ),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
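Every FilesClient method in this file picks up the same optional organization_id keyword, and because the query parameters pass through remove_none_from_dict, leaving it as None keeps the request identical to 0.1.4. A short sketch under the same placeholder assumptions as the example above:

    # organization_id is optional; when None it is stripped from the query string.
    files = client.files.list_files(
        project_id="PROJECT_ID",            # placeholder
        organization_id="ORGANIZATION_ID",  # placeholder
    )
    for f in files:
        presigned = client.files.read_file_content(f.id, organization_id="ORGANIZATION_ID")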
@@ -373,7 +489,9 @@ class AsyncFilesClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
         self._client_wrapper = client_wrapper

-    async def get_file(self, id: str, *, project_id: typing.Optional[str] = None) -> File:
+    async def get_file(
+        self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+    ) -> File:
         """
         Read File metadata objects.

@@ -381,6 +499,8 @@ class AsyncFilesClient:
             - id: str.

             - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

@@ -394,7 +514,7 @@ class AsyncFilesClient:
         _response = await self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -408,7 +528,9 @@ class AsyncFilesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def delete_file(self, id: str, *, project_id: typing.Optional[str] = None) -> None:
+    async def delete_file(
+        self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+    ) -> None:
         """
         Delete the file from S3.

@@ -416,6 +538,8 @@ class AsyncFilesClient:
             - id: str.

             - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

@@ -429,7 +553,7 @@ class AsyncFilesClient:
         _response = await self._client_wrapper.httpx_client.request(
             "DELETE",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -443,12 +567,16 @@ class AsyncFilesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def list_files(self, *, project_id: typing.Optional[str] = None) -> typing.List[File]:
+    async def list_files(
+        self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+    ) -> typing.List[File]:
         """
         Read File metadata objects.

         Parameters:
             - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

@@ -460,7 +588,7 @@ class AsyncFilesClient:
         _response = await self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -474,19 +602,27 @@ class AsyncFilesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def upload_file(self, *, project_id: typing.Optional[str] = None, upload_file: typing.IO) -> File:
+    async def upload_file(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        upload_file: typing.IO,
+    ) -> File:
         """
         Upload a file to S3.

         Parameters:
             - project_id: typing.Optional[str].

+            - organization_id: typing.Optional[str].
+
             - upload_file: typing.IO.
         """
         _response = await self._client_wrapper.httpx_client.request(
             "POST",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             data=jsonable_encoder({}),
             files={"upload_file": upload_file},
             headers=self._client_wrapper.get_headers(),
@@ -506,6 +642,7 @@ class AsyncFilesClient:
         self,
         *,
         project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
         name: str,
         file_size: typing.Optional[int] = OMIT,
         last_modified_at: typing.Optional[dt.datetime] = OMIT,
@@ -518,6 +655,8 @@ class AsyncFilesClient:
         Parameters:
             - project_id: typing.Optional[str].

+            - organization_id: typing.Optional[str].
+
             - name: str.

             - file_size: typing.Optional[int].
@@ -549,7 +688,7 @@ class AsyncFilesClient:
         _response = await self._client_wrapper.httpx_client.request(
             "PUT",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(_request),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
@@ -564,12 +703,16 @@ class AsyncFilesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def sync_files(self, *, project_id: typing.Optional[str] = None) -> typing.List[File]:
+    async def sync_files(
+        self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+    ) -> typing.List[File]:
         """
         Sync Files API against file contents uploaded via S3 presigned urls.

         Parameters:
             - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

@@ -581,7 +724,7 @@ class AsyncFilesClient:
         _response = await self._client_wrapper.httpx_client.request(
             "PUT",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/sync"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -595,7 +738,84 @@ class AsyncFilesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def read_file_content(self, id: str, *, project_id: typing.Optional[str] = None) -> PresignedUrl:
+    async def upload_file_from_url(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        name: typing.Optional[str] = OMIT,
+        url: str,
+        proxy_url: typing.Optional[str] = OMIT,
+        request_headers: typing.Optional[typing.Dict[str, typing.Optional[str]]] = OMIT,
+        verify_ssl: typing.Optional[bool] = OMIT,
+        follow_redirects: typing.Optional[bool] = OMIT,
+        resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]] = OMIT,
+    ) -> File:
+        """
+        Upload a file to S3 from a URL.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
+
+            - name: typing.Optional[str].
+
+            - url: str. URL of the file to download
+
+            - proxy_url: typing.Optional[str].
+
+            - request_headers: typing.Optional[typing.Dict[str, typing.Optional[str]]].
+
+            - verify_ssl: typing.Optional[bool]. Whether to verify the SSL certificate when downloading the file
+
+            - follow_redirects: typing.Optional[bool]. Whether to follow redirects when downloading the file
+
+            - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]].
+        ---
+        from llama_cloud.client import AsyncLlamaCloud
+
+        client = AsyncLlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        await client.files.upload_file_from_url(
+            url="string",
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {"url": url}
+        if name is not OMIT:
+            _request["name"] = name
+        if proxy_url is not OMIT:
+            _request["proxy_url"] = proxy_url
+        if request_headers is not OMIT:
+            _request["request_headers"] = request_headers
+        if verify_ssl is not OMIT:
+            _request["verify_ssl"] = verify_ssl
+        if follow_redirects is not OMIT:
+            _request["follow_redirects"] = follow_redirects
+        if resource_info is not OMIT:
+            _request["resource_info"] = resource_info
+        _response = await self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/upload_from_url"),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(File, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def read_file_content(
+        self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+    ) -> PresignedUrl:
         """
         Returns a presigned url to read the file content.

@@ -603,6 +823,8 @@ class AsyncFilesClient:
             - id: str.

             - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

@@ -616,7 +838,7 @@ class AsyncFilesClient:
         _response = await self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/content"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -631,7 +853,7 @@ class AsyncFilesClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)

     async def list_file_page_screenshots(
-        self, id: str, *, project_id: typing.Optional[str] = None
+        self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
     ) -> typing.List[PageScreenshotMetadata]:
         """
         List metadata for all screenshots of pages from a file.
@@ -640,6 +862,8 @@ class AsyncFilesClient:
             - id: str.

             - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

@@ -653,7 +877,7 @@ class AsyncFilesClient:
         _response = await self._client_wrapper.httpx_client.request(
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots"),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
@@ -668,7 +892,12 @@ class AsyncFilesClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)

     async def get_file_page_screenshot(
-        self, id: str, page_index: int, *, project_id: typing.Optional[str] = None
+        self,
+        id: str,
+        page_index: int,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
     ) -> typing.Any:
         """
         Get screenshot of a page from a file.
@@ -679,6 +908,8 @@ class AsyncFilesClient:
             - page_index: int.

             - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

@@ -695,7 +926,7 @@ class AsyncFilesClient:
             urllib.parse.urljoin(
                 f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots/{page_index}"
             ),
-            params=remove_none_from_dict({"project_id": project_id}),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             headers=self._client_wrapper.get_headers(),
             timeout=60,
         )
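AsyncFilesClient mirrors the sync client, so the same additions are available as awaitables. A hedged sketch of the async variant, again with placeholder values:

    import asyncio

    from llama_cloud.client import AsyncLlamaCloud


    async def main() -> None:
        client = AsyncLlamaCloud(token="YOUR_TOKEN")  # placeholder credentials
        file = await client.files.upload_file_from_url(
            url="https://example.com/report.pdf",  # placeholder URL
            organization_id="ORGANIZATION_ID",     # placeholder
            follow_redirects=True,
        )
        print(file.id)


    asyncio.run(main())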
llama_cloud/resources/files/types/__init__.py

@@ -1,5 +1,6 @@
 # This file was auto-generated by Fern from our API Definition.

+from .file_create_from_url_resource_info_value import FileCreateFromUrlResourceInfoValue
 from .file_create_resource_info_value import FileCreateResourceInfoValue

-__all__ = ["FileCreateResourceInfoValue"]
+__all__ = ["FileCreateFromUrlResourceInfoValue", "FileCreateResourceInfoValue"]
llama_cloud/resources/files/types/file_create_from_url_resource_info_value.py (new file)

@@ -0,0 +1,7 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+FileCreateFromUrlResourceInfoValue = typing.Union[
+    typing.Dict[str, typing.Any], typing.List[typing.Any], str, int, float, bool
+]
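FileCreateFromUrlResourceInfoValue is a JSON-style union (dict, list, str, int, float, or bool), so the resource_info argument on upload_file_from_url accepts arbitrary per-file metadata values. An illustrative example with made-up values, reusing the client from the earlier sketch:

    resource_info = {
        "source": "crawler",                    # str
        "page_count": 12,                       # int
        "tags": ["quarterly", "finance"],       # list
        "origin": {"bucket": "raw-uploads"},    # nested dict
    }

    file = client.files.upload_file_from_url(
        url="https://example.com/report.pdf",  # placeholder URL
        resource_info=resource_info,
    )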