llama-cloud 0.1.38__py3-none-any.whl → 0.1.40__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of llama-cloud might be problematic; see the advisory details accompanying the release for more information.

Files changed (39):
  1. llama_cloud/__init__.py +12 -0
  2. llama_cloud/resources/admin/client.py +5 -5
  3. llama_cloud/resources/alpha/client.py +2 -8
  4. llama_cloud/resources/beta/client.py +30 -126
  5. llama_cloud/resources/chat_apps/client.py +8 -32
  6. llama_cloud/resources/classifier/client.py +8 -32
  7. llama_cloud/resources/data_sinks/client.py +8 -32
  8. llama_cloud/resources/data_sources/client.py +8 -32
  9. llama_cloud/resources/embedding_model_configs/client.py +12 -48
  10. llama_cloud/resources/files/client.py +42 -176
  11. llama_cloud/resources/jobs/client.py +2 -8
  12. llama_cloud/resources/llama_extract/client.py +40 -138
  13. llama_cloud/resources/organizations/client.py +4 -18
  14. llama_cloud/resources/parsing/client.py +12 -16
  15. llama_cloud/resources/pipelines/client.py +45 -32
  16. llama_cloud/resources/projects/client.py +18 -78
  17. llama_cloud/resources/reports/client.py +30 -126
  18. llama_cloud/resources/retrievers/client.py +12 -48
  19. llama_cloud/types/__init__.py +12 -0
  20. llama_cloud/types/extract_job_create.py +2 -0
  21. llama_cloud/types/extract_job_create_priority.py +29 -0
  22. llama_cloud/types/file.py +1 -1
  23. llama_cloud/types/job_names.py +0 -4
  24. llama_cloud/types/llama_extract_feature_availability.py +34 -0
  25. llama_cloud/types/llama_parse_parameters.py +1 -0
  26. llama_cloud/types/parse_job_config.py +1 -0
  27. llama_cloud/types/pipeline.py +4 -0
  28. llama_cloud/types/pipeline_create.py +2 -0
  29. llama_cloud/types/pipeline_file.py +4 -4
  30. llama_cloud/types/schema_generation_availability.py +33 -0
  31. llama_cloud/types/schema_generation_availability_status.py +17 -0
  32. llama_cloud/types/sparse_model_config.py +42 -0
  33. llama_cloud/types/sparse_model_type.py +33 -0
  34. llama_cloud/types/webhook_configuration.py +1 -0
  35. llama_cloud-0.1.40.dist-info/METADATA +106 -0
  36. {llama_cloud-0.1.38.dist-info → llama_cloud-0.1.40.dist-info}/RECORD +38 -32
  37. {llama_cloud-0.1.38.dist-info → llama_cloud-0.1.40.dist-info}/WHEEL +1 -1
  38. llama_cloud-0.1.38.dist-info/METADATA +0 -32
  39. {llama_cloud-0.1.38.dist-info → llama_cloud-0.1.40.dist-info}/LICENSE +0 -0
@@ -144,13 +144,7 @@ class ProjectsClient:
144
144
  raise ApiError(status_code=_response.status_code, body=_response.text)
145
145
  raise ApiError(status_code=_response.status_code, body=_response_json)
146
146
 
147
- def get_project(
148
- self,
149
- project_id: typing.Optional[str],
150
- *,
151
- organization_id: typing.Optional[str] = None,
152
- project_id: typing.Optional[str] = None,
153
- ) -> Project:
147
+ def get_project(self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None) -> Project:
154
148
  """
155
149
  Get a project by ID.
156
150
 
@@ -158,8 +152,6 @@ class ProjectsClient:
158
152
  - project_id: typing.Optional[str].
159
153
 
160
154
  - organization_id: typing.Optional[str].
161
-
162
- - project_id: typing.Optional[str].
163
155
  ---
164
156
  from llama_cloud.client import LlamaCloud
165
157
 
@@ -172,7 +164,7 @@ class ProjectsClient:
172
164
  "GET",
173
165
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
174
166
  params=remove_none_from_dict({"organization_id": organization_id}),
175
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
167
+ headers=self._client_wrapper.get_headers(),
176
168
  timeout=60,
177
169
  )
178
170
  if 200 <= _response.status_code < 300:
@@ -186,12 +178,7 @@ class ProjectsClient:
186
178
  raise ApiError(status_code=_response.status_code, body=_response_json)
187
179
 
188
180
  def update_existing_project(
189
- self,
190
- project_id: typing.Optional[str],
191
- *,
192
- organization_id: typing.Optional[str] = None,
193
- name: str,
194
- project_id: typing.Optional[str] = None,
181
+ self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None, name: str
195
182
  ) -> Project:
196
183
  """
197
184
  Update an existing project.
@@ -202,8 +189,6 @@ class ProjectsClient:
202
189
  - organization_id: typing.Optional[str].
203
190
 
204
191
  - name: str.
205
-
206
- - project_id: typing.Optional[str].
207
192
  ---
208
193
  from llama_cloud.client import LlamaCloud
209
194
 
@@ -219,7 +204,7 @@ class ProjectsClient:
219
204
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
220
205
  params=remove_none_from_dict({"organization_id": organization_id}),
221
206
  json=jsonable_encoder({"name": name}),
222
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
207
+ headers=self._client_wrapper.get_headers(),
223
208
  timeout=60,
224
209
  )
225
210
  if 200 <= _response.status_code < 300:
@@ -232,13 +217,7 @@ class ProjectsClient:
232
217
  raise ApiError(status_code=_response.status_code, body=_response.text)
233
218
  raise ApiError(status_code=_response.status_code, body=_response_json)
234
219
 
235
- def delete_project(
236
- self,
237
- project_id: typing.Optional[str],
238
- *,
239
- organization_id: typing.Optional[str] = None,
240
- project_id: typing.Optional[str] = None,
241
- ) -> None:
220
+ def delete_project(self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None) -> None:
242
221
  """
243
222
  Delete a project by ID.
244
223
 
@@ -246,8 +225,6 @@ class ProjectsClient:
246
225
  - project_id: typing.Optional[str].
247
226
 
248
227
  - organization_id: typing.Optional[str].
249
-
250
- - project_id: typing.Optional[str].
251
228
  ---
252
229
  from llama_cloud.client import LlamaCloud
253
230
 
@@ -260,7 +237,7 @@ class ProjectsClient:
260
237
  "DELETE",
261
238
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
262
239
  params=remove_none_from_dict({"organization_id": organization_id}),
263
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
240
+ headers=self._client_wrapper.get_headers(),
264
241
  timeout=60,
265
242
  )
266
243
  if 200 <= _response.status_code < 300:
@@ -274,11 +251,7 @@ class ProjectsClient:
274
251
  raise ApiError(status_code=_response.status_code, body=_response_json)
275
252
 
276
253
  def get_current_project(
277
- self,
278
- *,
279
- project_id: typing.Optional[str] = None,
280
- organization_id: typing.Optional[str] = None,
281
- project_id: typing.Optional[str] = None,
254
+ self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
282
255
  ) -> Project:
283
256
  """
284
257
  Get the current project.
@@ -287,8 +260,6 @@ class ProjectsClient:
287
260
  - project_id: typing.Optional[str].
288
261
 
289
262
  - organization_id: typing.Optional[str].
290
-
291
- - project_id: typing.Optional[str].
292
263
  ---
293
264
  from llama_cloud.client import LlamaCloud
294
265
 
@@ -301,7 +272,7 @@ class ProjectsClient:
301
272
  "GET",
302
273
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/projects/current"),
303
274
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
304
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
275
+ headers=self._client_wrapper.get_headers(),
305
276
  timeout=60,
306
277
  )
307
278
  if 200 <= _response.status_code < 300:
@@ -320,7 +291,6 @@ class ProjectsClient:
320
291
  *,
321
292
  get_current_invoice_total: typing.Optional[bool] = None,
322
293
  organization_id: typing.Optional[str] = None,
323
- project_id: typing.Optional[str] = None,
324
294
  ) -> UsageAndPlan:
325
295
  """
326
296
  Get usage for a project
@@ -331,8 +301,6 @@ class ProjectsClient:
331
301
  - get_current_invoice_total: typing.Optional[bool].
332
302
 
333
303
  - organization_id: typing.Optional[str].
334
-
335
- - project_id: typing.Optional[str].
336
304
  ---
337
305
  from llama_cloud.client import LlamaCloud
338
306
 
@@ -347,7 +315,7 @@ class ProjectsClient:
347
315
  params=remove_none_from_dict(
348
316
  {"get_current_invoice_total": get_current_invoice_total, "organization_id": organization_id}
349
317
  ),
350
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
318
+ headers=self._client_wrapper.get_headers(),
351
319
  timeout=60,
352
320
  )
353
321
  if 200 <= _response.status_code < 300:
@@ -480,11 +448,7 @@ class AsyncProjectsClient:
480
448
  raise ApiError(status_code=_response.status_code, body=_response_json)
481
449
 
482
450
  async def get_project(
483
- self,
484
- project_id: typing.Optional[str],
485
- *,
486
- organization_id: typing.Optional[str] = None,
487
- project_id: typing.Optional[str] = None,
451
+ self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
488
452
  ) -> Project:
489
453
  """
490
454
  Get a project by ID.
@@ -493,8 +457,6 @@ class AsyncProjectsClient:
493
457
  - project_id: typing.Optional[str].
494
458
 
495
459
  - organization_id: typing.Optional[str].
496
-
497
- - project_id: typing.Optional[str].
498
460
  ---
499
461
  from llama_cloud.client import AsyncLlamaCloud
500
462
 
@@ -507,7 +469,7 @@ class AsyncProjectsClient:
507
469
  "GET",
508
470
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
509
471
  params=remove_none_from_dict({"organization_id": organization_id}),
510
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
472
+ headers=self._client_wrapper.get_headers(),
511
473
  timeout=60,
512
474
  )
513
475
  if 200 <= _response.status_code < 300:
@@ -521,12 +483,7 @@ class AsyncProjectsClient:
521
483
  raise ApiError(status_code=_response.status_code, body=_response_json)
522
484
 
523
485
  async def update_existing_project(
524
- self,
525
- project_id: typing.Optional[str],
526
- *,
527
- organization_id: typing.Optional[str] = None,
528
- name: str,
529
- project_id: typing.Optional[str] = None,
486
+ self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None, name: str
530
487
  ) -> Project:
531
488
  """
532
489
  Update an existing project.
@@ -537,8 +494,6 @@ class AsyncProjectsClient:
537
494
  - organization_id: typing.Optional[str].
538
495
 
539
496
  - name: str.
540
-
541
- - project_id: typing.Optional[str].
542
497
  ---
543
498
  from llama_cloud.client import AsyncLlamaCloud
544
499
 
@@ -554,7 +509,7 @@ class AsyncProjectsClient:
554
509
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
555
510
  params=remove_none_from_dict({"organization_id": organization_id}),
556
511
  json=jsonable_encoder({"name": name}),
557
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
512
+ headers=self._client_wrapper.get_headers(),
558
513
  timeout=60,
559
514
  )
560
515
  if 200 <= _response.status_code < 300:
@@ -568,11 +523,7 @@ class AsyncProjectsClient:
568
523
  raise ApiError(status_code=_response.status_code, body=_response_json)
569
524
 
570
525
  async def delete_project(
571
- self,
572
- project_id: typing.Optional[str],
573
- *,
574
- organization_id: typing.Optional[str] = None,
575
- project_id: typing.Optional[str] = None,
526
+ self, project_id: typing.Optional[str], *, organization_id: typing.Optional[str] = None
576
527
  ) -> None:
577
528
  """
578
529
  Delete a project by ID.
@@ -581,8 +532,6 @@ class AsyncProjectsClient:
581
532
  - project_id: typing.Optional[str].
582
533
 
583
534
  - organization_id: typing.Optional[str].
584
-
585
- - project_id: typing.Optional[str].
586
535
  ---
587
536
  from llama_cloud.client import AsyncLlamaCloud
588
537
 
@@ -595,7 +544,7 @@ class AsyncProjectsClient:
595
544
  "DELETE",
596
545
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/projects/{project_id}"),
597
546
  params=remove_none_from_dict({"organization_id": organization_id}),
598
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
547
+ headers=self._client_wrapper.get_headers(),
599
548
  timeout=60,
600
549
  )
601
550
  if 200 <= _response.status_code < 300:
@@ -609,11 +558,7 @@ class AsyncProjectsClient:
609
558
  raise ApiError(status_code=_response.status_code, body=_response_json)
610
559
 
611
560
  async def get_current_project(
612
- self,
613
- *,
614
- project_id: typing.Optional[str] = None,
615
- organization_id: typing.Optional[str] = None,
616
- project_id: typing.Optional[str] = None,
561
+ self, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
617
562
  ) -> Project:
618
563
  """
619
564
  Get the current project.
@@ -622,8 +567,6 @@ class AsyncProjectsClient:
622
567
  - project_id: typing.Optional[str].
623
568
 
624
569
  - organization_id: typing.Optional[str].
625
-
626
- - project_id: typing.Optional[str].
627
570
  ---
628
571
  from llama_cloud.client import AsyncLlamaCloud
629
572
 
@@ -636,7 +579,7 @@ class AsyncProjectsClient:
636
579
  "GET",
637
580
  urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/projects/current"),
638
581
  params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
639
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
582
+ headers=self._client_wrapper.get_headers(),
640
583
  timeout=60,
641
584
  )
642
585
  if 200 <= _response.status_code < 300:
@@ -655,7 +598,6 @@ class AsyncProjectsClient:
655
598
  *,
656
599
  get_current_invoice_total: typing.Optional[bool] = None,
657
600
  organization_id: typing.Optional[str] = None,
658
- project_id: typing.Optional[str] = None,
659
601
  ) -> UsageAndPlan:
660
602
  """
661
603
  Get usage for a project
@@ -666,8 +608,6 @@ class AsyncProjectsClient:
666
608
  - get_current_invoice_total: typing.Optional[bool].
667
609
 
668
610
  - organization_id: typing.Optional[str].
669
-
670
- - project_id: typing.Optional[str].
671
611
  ---
672
612
  from llama_cloud.client import AsyncLlamaCloud
673
613
 
@@ -682,7 +622,7 @@ class AsyncProjectsClient:
682
622
  params=remove_none_from_dict(
683
623
  {"get_current_invoice_total": get_current_invoice_total, "organization_id": organization_id}
684
624
  ),
685
- headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
625
+ headers=self._client_wrapper.get_headers(),
686
626
  timeout=60,
687
627
  )
688
628
  if 200 <= _response.status_code < 300: