llama-cloud 0.1.36__py3-none-any.whl → 0.1.38__py3-none-any.whl

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.

Potentially problematic release: this version of llama-cloud has been flagged as possibly problematic.

Files changed (47)
  1. llama_cloud/__init__.py +20 -4
  2. llama_cloud/client.py +3 -0
  3. llama_cloud/resources/__init__.py +3 -3
  4. llama_cloud/resources/admin/client.py +57 -0
  5. llama_cloud/resources/alpha/__init__.py +2 -0
  6. llama_cloud/resources/alpha/client.py +118 -0
  7. llama_cloud/resources/beta/client.py +576 -20
  8. llama_cloud/resources/chat_apps/client.py +32 -8
  9. llama_cloud/resources/classifier/client.py +139 -11
  10. llama_cloud/resources/data_sinks/client.py +32 -8
  11. llama_cloud/resources/data_sources/client.py +32 -8
  12. llama_cloud/resources/data_sources/types/data_source_update_component.py +2 -0
  13. llama_cloud/resources/embedding_model_configs/client.py +48 -12
  14. llama_cloud/resources/files/__init__.py +2 -2
  15. llama_cloud/resources/files/client.py +189 -113
  16. llama_cloud/resources/files/types/__init__.py +1 -3
  17. llama_cloud/resources/jobs/client.py +12 -6
  18. llama_cloud/resources/llama_extract/client.py +138 -32
  19. llama_cloud/resources/organizations/client.py +18 -4
  20. llama_cloud/resources/parsing/client.py +16 -4
  21. llama_cloud/resources/pipelines/client.py +32 -8
  22. llama_cloud/resources/projects/client.py +78 -18
  23. llama_cloud/resources/reports/client.py +126 -30
  24. llama_cloud/resources/retrievers/client.py +48 -12
  25. llama_cloud/types/__init__.py +20 -2
  26. llama_cloud/types/agent_deployment_summary.py +1 -0
  27. llama_cloud/types/classify_job.py +2 -0
  28. llama_cloud/types/cloud_jira_data_source_v_2.py +52 -0
  29. llama_cloud/types/cloud_jira_data_source_v_2_api_version.py +21 -0
  30. llama_cloud/types/configurable_data_source_names.py +4 -0
  31. llama_cloud/types/data_source_component.py +2 -0
  32. llama_cloud/types/data_source_create_component.py +2 -0
  33. llama_cloud/types/data_source_reader_version_metadata_reader_version.py +9 -1
  34. llama_cloud/types/file_create.py +41 -0
  35. llama_cloud/types/{classify_job_with_status.py → file_filter.py} +8 -15
  36. llama_cloud/types/file_query_response.py +38 -0
  37. llama_cloud/types/llama_extract_mode_availability.py +37 -0
  38. llama_cloud/types/llama_extract_mode_availability_status.py +17 -0
  39. llama_cloud/types/paginated_response_classify_job.py +34 -0
  40. llama_cloud/types/pipeline_data_source_component.py +2 -0
  41. llama_cloud/types/usage_response_active_alerts_item.py +4 -0
  42. {llama_cloud-0.1.36.dist-info → llama_cloud-0.1.38.dist-info}/METADATA +2 -1
  43. {llama_cloud-0.1.36.dist-info → llama_cloud-0.1.38.dist-info}/RECORD +47 -38
  44. {llama_cloud-0.1.36.dist-info → llama_cloud-0.1.38.dist-info}/WHEEL +1 -1
  45. /llama_cloud/{resources/files/types → types}/file_create_permission_info_value.py +0 -0
  46. /llama_cloud/{resources/files/types → types}/file_create_resource_info_value.py +0 -0
  47. {llama_cloud-0.1.36.dist-info → llama_cloud-0.1.38.dist-info}/LICENSE +0 -0
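The largest change in this release is in llama_cloud/resources/files/client.py (diff below): generate_presigned_url now takes a single FileCreate request object instead of flat keyword arguments, and every files endpoint gains an extra project_id value that is also sent as a Project-Id request header. A minimal sketch of the new call shape, based on the docstring example in the diff; the token and file name are placeholders:

    from llama_cloud import FileCreate
    from llama_cloud.client import LlamaCloud

    client = LlamaCloud(token="YOUR_TOKEN")  # placeholder token

    # 0.1.36 accepted flat keyword arguments (name=..., file_size=..., etc.);
    # 0.1.38 wraps them in a FileCreate request object.
    presigned = client.files.generate_presigned_url(
        request=FileCreate(name="report.pdf"),  # placeholder file name
    )
    # Per the diff, the call returns a FileIdPresignedUrl for the upload.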
@@ -1,6 +1,5 @@
  # This file was auto-generated by Fern from our API Definition.

- import datetime as dt
  import typing
  import urllib.parse
  from json.decoder import JSONDecodeError
@@ -11,14 +10,13 @@ from ...core.jsonable_encoder import jsonable_encoder
  from ...core.remove_none_from_dict import remove_none_from_dict
  from ...errors.unprocessable_entity_error import UnprocessableEntityError
  from ...types.file import File
+ from ...types.file_create import FileCreate
  from ...types.file_id_presigned_url import FileIdPresignedUrl
  from ...types.http_validation_error import HttpValidationError
  from ...types.page_figure_metadata import PageFigureMetadata
  from ...types.page_screenshot_metadata import PageScreenshotMetadata
  from ...types.presigned_url import PresignedUrl
  from .types.file_create_from_url_resource_info_value import FileCreateFromUrlResourceInfoValue
- from .types.file_create_permission_info_value import FileCreatePermissionInfoValue
- from .types.file_create_resource_info_value import FileCreateResourceInfoValue

  try:
  import pydantic
@@ -37,7 +35,12 @@ class FilesClient:
  self._client_wrapper = client_wrapper

  def get_file(
- self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ id: str,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> File:
  """
  Read File metadata objects.
@@ -48,6 +51,8 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -62,7 +67,7 @@ class FilesClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -76,7 +81,12 @@ class FilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  def delete_file(
- self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ id: str,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> None:
  """
  Delete the file from S3.
@@ -87,6 +97,8 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -101,7 +113,7 @@ class FilesClient:
  "DELETE",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -115,7 +127,11 @@ class FilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  def list_files(
- self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.List[File]:
  """
  Read File metadata objects.
@@ -124,6 +140,8 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -136,7 +154,7 @@ class FilesClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -156,6 +174,7 @@ class FilesClient:
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
  upload_file: typing.IO,
+ project_id: typing.Optional[str] = None,
  ) -> File:
  """
  Upload a file to S3.
@@ -168,6 +187,8 @@ class FilesClient:
  - organization_id: typing.Optional[str].

  - upload_file: typing.IO.
+
+ - project_id: typing.Optional[str].
  """
  _response = self._client_wrapper.httpx_client.request(
  "POST",
@@ -177,7 +198,7 @@ class FilesClient:
  ),
  data=jsonable_encoder({}),
  files={"upload_file": upload_file},
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -196,13 +217,8 @@ class FilesClient:
  expires_at_seconds: typing.Optional[int] = None,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- name: str,
- external_file_id: typing.Optional[str] = OMIT,
- file_size: typing.Optional[int] = OMIT,
- last_modified_at: typing.Optional[dt.datetime] = OMIT,
- resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateResourceInfoValue]]] = OMIT,
- permission_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreatePermissionInfoValue]]] = OMIT,
- data_source_id: typing.Optional[str] = OMIT,
+ request: FileCreate,
+ project_id: typing.Optional[str] = None,
  ) -> FileIdPresignedUrl:
  """
  Create a presigned url for uploading a file.
@@ -217,50 +233,30 @@ class FilesClient:

  - organization_id: typing.Optional[str].

- - name: str. Name that will be used for created file. If possible, always include the file extension in the name.
-
- - external_file_id: typing.Optional[str].
-
- - file_size: typing.Optional[int].
-
- - last_modified_at: typing.Optional[dt.datetime].
+ - request: FileCreate.

- - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateResourceInfoValue]]].
-
- - permission_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreatePermissionInfoValue]]].
-
- - data_source_id: typing.Optional[str].
+ - project_id: typing.Optional[str].
  ---
+ from llama_cloud import FileCreate
  from llama_cloud.client import LlamaCloud

  client = LlamaCloud(
  token="YOUR_TOKEN",
  )
  client.files.generate_presigned_url(
- name="string",
+ request=FileCreate(
+ name="string",
+ ),
  )
  """
- _request: typing.Dict[str, typing.Any] = {"name": name}
- if external_file_id is not OMIT:
- _request["external_file_id"] = external_file_id
- if file_size is not OMIT:
- _request["file_size"] = file_size
- if last_modified_at is not OMIT:
- _request["last_modified_at"] = last_modified_at
- if resource_info is not OMIT:
- _request["resource_info"] = resource_info
- if permission_info is not OMIT:
- _request["permission_info"] = permission_info
- if data_source_id is not OMIT:
- _request["data_source_id"] = data_source_id
  _response = self._client_wrapper.httpx_client.request(
  "PUT",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
  params=remove_none_from_dict(
  {"expires_at_seconds": expires_at_seconds, "project_id": project_id, "organization_id": organization_id}
  ),
- json=jsonable_encoder(_request),
- headers=self._client_wrapper.get_headers(),
+ json=jsonable_encoder(request),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -274,7 +270,11 @@ class FilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  def sync_files(
- self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.List[File]:
  """
  Sync Files API against file contents uploaded via S3 presigned urls.
@@ -283,6 +283,8 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -295,7 +297,7 @@ class FilesClient:
  "PUT",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/sync"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -320,6 +322,7 @@ class FilesClient:
  verify_ssl: typing.Optional[bool] = OMIT,
  follow_redirects: typing.Optional[bool] = OMIT,
  resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]] = OMIT,
+ project_id: typing.Optional[str] = None,
  ) -> File:
  """
  Upload a file to the project from a URL.
@@ -349,6 +352,8 @@ class FilesClient:
  - follow_redirects: typing.Optional[bool]. Whether to follow redirects when downloading the file

  - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -377,7 +382,7 @@ class FilesClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/upload_from_url"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder(_request),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -397,6 +402,7 @@ class FilesClient:
  expires_at_seconds: typing.Optional[int] = None,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> PresignedUrl:
  """
  Returns a presigned url to read the file content.
@@ -409,6 +415,8 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -425,7 +433,7 @@ class FilesClient:
  params=remove_none_from_dict(
  {"expires_at_seconds": expires_at_seconds, "project_id": project_id, "organization_id": organization_id}
  ),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -439,7 +447,12 @@ class FilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  def list_file_page_screenshots(
- self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ id: str,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.List[PageScreenshotMetadata]:
  """
  List metadata for all screenshots of pages from a file.
@@ -450,6 +463,8 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -464,7 +479,7 @@ class FilesClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -484,6 +499,7 @@ class FilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.Any:
  """
  Get screenshot of a page from a file.
@@ -496,6 +512,8 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -513,7 +531,7 @@ class FilesClient:
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots/{page_index}"
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -527,7 +545,12 @@ class FilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  def list_file_pages_figures(
- self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ id: str,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.List[PageFigureMetadata]:
  """
  Parameters:
@@ -536,6 +559,8 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -550,7 +575,7 @@ class FilesClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -570,6 +595,7 @@ class FilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.List[PageFigureMetadata]:
  """
  Parameters:
@@ -580,6 +606,8 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -597,7 +625,7 @@ class FilesClient:
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures/{page_index}"
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -618,6 +646,7 @@ class FilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.Any:
  """
  Parameters:
@@ -630,6 +659,8 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -648,7 +679,7 @@ class FilesClient:
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures/{page_index}/{figure_name}"
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -668,6 +699,7 @@ class FilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> PresignedUrl:
  """
  Returns a presigned url to read a page screenshot.
@@ -683,6 +715,8 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -701,7 +735,7 @@ class FilesClient:
  f"api/v1/files/{id}/page_screenshots/{page_index}/presigned_url",
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -722,6 +756,7 @@ class FilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> PresignedUrl:
  """
  Returns a presigned url to read a page figure.
@@ -739,6 +774,8 @@ class FilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import LlamaCloud

@@ -758,7 +795,7 @@ class FilesClient:
  f"api/v1/files/{id}/page-figures/{page_index}/{figure_name}/presigned_url",
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -777,7 +814,12 @@ class AsyncFilesClient:
  self._client_wrapper = client_wrapper

  async def get_file(
- self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ id: str,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> File:
  """
  Read File metadata objects.
@@ -788,6 +830,8 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -802,7 +846,7 @@ class AsyncFilesClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -816,7 +860,12 @@ class AsyncFilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def delete_file(
- self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ id: str,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> None:
  """
  Delete the file from S3.
@@ -827,6 +876,8 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -841,7 +892,7 @@ class AsyncFilesClient:
  "DELETE",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -855,7 +906,11 @@ class AsyncFilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def list_files(
- self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.List[File]:
  """
  Read File metadata objects.
@@ -864,6 +919,8 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -876,7 +933,7 @@ class AsyncFilesClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -896,6 +953,7 @@ class AsyncFilesClient:
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
  upload_file: typing.IO,
+ project_id: typing.Optional[str] = None,
  ) -> File:
  """
  Upload a file to S3.
@@ -908,6 +966,8 @@ class AsyncFilesClient:
  - organization_id: typing.Optional[str].

  - upload_file: typing.IO.
+
+ - project_id: typing.Optional[str].
  """
  _response = await self._client_wrapper.httpx_client.request(
  "POST",
@@ -917,7 +977,7 @@ class AsyncFilesClient:
  ),
  data=jsonable_encoder({}),
  files={"upload_file": upload_file},
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -936,13 +996,8 @@ class AsyncFilesClient:
  expires_at_seconds: typing.Optional[int] = None,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- name: str,
- external_file_id: typing.Optional[str] = OMIT,
- file_size: typing.Optional[int] = OMIT,
- last_modified_at: typing.Optional[dt.datetime] = OMIT,
- resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateResourceInfoValue]]] = OMIT,
- permission_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreatePermissionInfoValue]]] = OMIT,
- data_source_id: typing.Optional[str] = OMIT,
+ request: FileCreate,
+ project_id: typing.Optional[str] = None,
  ) -> FileIdPresignedUrl:
  """
  Create a presigned url for uploading a file.
@@ -957,50 +1012,30 @@ class AsyncFilesClient:

  - organization_id: typing.Optional[str].

- - name: str. Name that will be used for created file. If possible, always include the file extension in the name.
-
- - external_file_id: typing.Optional[str].
-
- - file_size: typing.Optional[int].
-
- - last_modified_at: typing.Optional[dt.datetime].
+ - request: FileCreate.

- - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateResourceInfoValue]]].
-
- - permission_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreatePermissionInfoValue]]].
-
- - data_source_id: typing.Optional[str].
+ - project_id: typing.Optional[str].
  ---
+ from llama_cloud import FileCreate
  from llama_cloud.client import AsyncLlamaCloud

  client = AsyncLlamaCloud(
  token="YOUR_TOKEN",
  )
  await client.files.generate_presigned_url(
- name="string",
+ request=FileCreate(
+ name="string",
+ ),
  )
  """
- _request: typing.Dict[str, typing.Any] = {"name": name}
- if external_file_id is not OMIT:
- _request["external_file_id"] = external_file_id
- if file_size is not OMIT:
- _request["file_size"] = file_size
- if last_modified_at is not OMIT:
- _request["last_modified_at"] = last_modified_at
- if resource_info is not OMIT:
- _request["resource_info"] = resource_info
- if permission_info is not OMIT:
- _request["permission_info"] = permission_info
- if data_source_id is not OMIT:
- _request["data_source_id"] = data_source_id
  _response = await self._client_wrapper.httpx_client.request(
  "PUT",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files"),
  params=remove_none_from_dict(
  {"expires_at_seconds": expires_at_seconds, "project_id": project_id, "organization_id": organization_id}
  ),
- json=jsonable_encoder(_request),
- headers=self._client_wrapper.get_headers(),
+ json=jsonable_encoder(request),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1014,7 +1049,11 @@ class AsyncFilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def sync_files(
- self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.List[File]:
  """
  Sync Files API against file contents uploaded via S3 presigned urls.
@@ -1023,6 +1062,8 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1035,7 +1076,7 @@ class AsyncFilesClient:
  "PUT",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/sync"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1060,6 +1101,7 @@ class AsyncFilesClient:
  verify_ssl: typing.Optional[bool] = OMIT,
  follow_redirects: typing.Optional[bool] = OMIT,
  resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]] = OMIT,
+ project_id: typing.Optional[str] = None,
  ) -> File:
  """
  Upload a file to the project from a URL.
@@ -1089,6 +1131,8 @@ class AsyncFilesClient:
  - follow_redirects: typing.Optional[bool]. Whether to follow redirects when downloading the file

  - resource_info: typing.Optional[typing.Dict[str, typing.Optional[FileCreateFromUrlResourceInfoValue]]].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1117,7 +1161,7 @@ class AsyncFilesClient:
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/files/upload_from_url"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
  json=jsonable_encoder(_request),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1137,6 +1181,7 @@ class AsyncFilesClient:
  expires_at_seconds: typing.Optional[int] = None,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> PresignedUrl:
  """
  Returns a presigned url to read the file content.
@@ -1149,6 +1194,8 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1165,7 +1212,7 @@ class AsyncFilesClient:
  params=remove_none_from_dict(
  {"expires_at_seconds": expires_at_seconds, "project_id": project_id, "organization_id": organization_id}
  ),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1179,7 +1226,12 @@ class AsyncFilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def list_file_page_screenshots(
- self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ id: str,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.List[PageScreenshotMetadata]:
  """
  List metadata for all screenshots of pages from a file.
@@ -1190,6 +1242,8 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1204,7 +1258,7 @@ class AsyncFilesClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1224,6 +1278,7 @@ class AsyncFilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.Any:
  """
  Get screenshot of a page from a file.
@@ -1236,6 +1291,8 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1253,7 +1310,7 @@ class AsyncFilesClient:
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page_screenshots/{page_index}"
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1267,7 +1324,12 @@ class AsyncFilesClient:
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def list_file_pages_figures(
- self, id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
+ self,
+ id: str,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.List[PageFigureMetadata]:
  """
  Parameters:
@@ -1276,6 +1338,8 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1290,7 +1354,7 @@ class AsyncFilesClient:
  "GET",
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures"),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1310,6 +1374,7 @@ class AsyncFilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.List[PageFigureMetadata]:
  """
  Parameters:
@@ -1320,6 +1385,8 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1337,7 +1404,7 @@ class AsyncFilesClient:
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures/{page_index}"
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1358,6 +1425,7 @@ class AsyncFilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> typing.Any:
  """
  Parameters:
@@ -1370,6 +1438,8 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1388,7 +1458,7 @@ class AsyncFilesClient:
  f"{self._client_wrapper.get_base_url()}/", f"api/v1/files/{id}/page-figures/{page_index}/{figure_name}"
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1408,6 +1478,7 @@ class AsyncFilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> PresignedUrl:
  """
  Returns a presigned url to read a page screenshot.
@@ -1423,6 +1494,8 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1441,7 +1514,7 @@ class AsyncFilesClient:
  f"api/v1/files/{id}/page_screenshots/{page_index}/presigned_url",
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
@@ -1462,6 +1535,7 @@ class AsyncFilesClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
  ) -> PresignedUrl:
  """
  Returns a presigned url to read a page figure.
@@ -1479,6 +1553,8 @@ class AsyncFilesClient:
  - project_id: typing.Optional[str].

  - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
  ---
  from llama_cloud.client import AsyncLlamaCloud

@@ -1498,7 +1574,7 @@ class AsyncFilesClient:
  f"api/v1/files/{id}/page-figures/{page_index}/{figure_name}/presigned_url",
  ),
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
- headers=self._client_wrapper.get_headers(),
+ headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
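The other change repeated throughout FilesClient and AsyncFilesClient above is the trailing project_id parameter, which each method now also forwards as a Project-Id request header (remove_none_from_dict drops the header when project_id is None). A minimal sketch of a project-scoped call under that assumption; the token and ids are placeholders:

    from llama_cloud.client import LlamaCloud

    client = LlamaCloud(token="YOUR_TOKEN")  # placeholder token

    # project_id is still sent as a query parameter and, in 0.1.38, is also
    # merged into the request headers as "Project-Id"; leaving it as None
    # strips the header via remove_none_from_dict.
    file = client.files.get_file(
        "file_abc123",          # placeholder file id
        project_id="proj_123",  # placeholder project id
    )
    print(file)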