llama-cloud 0.1.41__py3-none-any.whl → 0.1.43__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of llama-cloud has been flagged as potentially problematic in the registry.

Files changed (43)
  1. llama_cloud/__init__.py +19 -19
  2. llama_cloud/resources/__init__.py +6 -0
  3. llama_cloud/resources/alpha/client.py +14 -30
  4. llama_cloud/resources/beta/client.py +1045 -59
  5. llama_cloud/resources/jobs/client.py +0 -8
  6. llama_cloud/resources/llama_extract/__init__.py +6 -0
  7. llama_cloud/resources/llama_extract/client.py +825 -941
  8. llama_cloud/resources/llama_extract/types/__init__.py +6 -0
  9. llama_cloud/resources/organizations/client.py +18 -4
  10. llama_cloud/resources/parsing/client.py +56 -0
  11. llama_cloud/resources/pipelines/client.py +164 -0
  12. llama_cloud/types/__init__.py +16 -22
  13. llama_cloud/types/agent_data.py +1 -1
  14. llama_cloud/types/agent_deployment_summary.py +1 -2
  15. llama_cloud/types/{prompt_conf.py → api_key.py} +14 -9
  16. llama_cloud/types/{extract_job_create.py → api_key_query_response.py} +6 -14
  17. llama_cloud/types/api_key_type.py +17 -0
  18. llama_cloud/types/delete_response.py +35 -0
  19. llama_cloud/types/extract_config.py +1 -0
  20. llama_cloud/types/extract_models.py +4 -0
  21. llama_cloud/types/extracted_table.py +40 -0
  22. llama_cloud/types/legacy_parse_job_config.py +3 -0
  23. llama_cloud/types/llama_parse_parameters.py +7 -0
  24. llama_cloud/types/organization.py +1 -0
  25. llama_cloud/types/paginated_response_spreadsheet_job.py +34 -0
  26. llama_cloud/types/parse_job_config.py +7 -0
  27. llama_cloud/types/public_model_name.py +4 -0
  28. llama_cloud/types/quota_configuration_configuration_type.py +4 -0
  29. llama_cloud/types/spreadsheet_job.py +50 -0
  30. llama_cloud/types/spreadsheet_parsing_config.py +35 -0
  31. {llama_cloud-0.1.41.dist-info → llama_cloud-0.1.43.dist-info}/METADATA +1 -1
  32. {llama_cloud-0.1.41.dist-info → llama_cloud-0.1.43.dist-info}/RECORD +37 -37
  33. llama_cloud/types/chunk_mode.py +0 -29
  34. llama_cloud/types/llama_extract_settings.py +0 -67
  35. llama_cloud/types/multimodal_parse_resolution.py +0 -17
  36. llama_cloud/types/schema_relax_mode.py +0 -25
  37. llama_cloud/types/struct_mode.py +0 -33
  38. llama_cloud/types/struct_parse_conf.py +0 -63
  39. /llama_cloud/{types → resources/llama_extract/types}/extract_job_create_data_schema_override.py +0 -0
  40. /llama_cloud/{types → resources/llama_extract/types}/extract_job_create_data_schema_override_zero_value.py +0 -0
  41. /llama_cloud/{types → resources/llama_extract/types}/extract_job_create_priority.py +0 -0
  42. {llama_cloud-0.1.41.dist-info → llama_cloud-0.1.43.dist-info}/LICENSE +0 -0
  43. {llama_cloud-0.1.41.dist-info → llama_cloud-0.1.43.dist-info}/WHEEL +0 -0
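The bulk of this diff is llama_cloud/resources/beta/client.py, which adds beta endpoints for API key management (api/v1/beta/api-keys), bulk agent-data deletion by query, and experimental spreadsheet parsing jobs (api/v1/beta/spreadsheet/jobs), and renames the agent_slug parameter to deployment_name on the agent-data methods. Below is a minimal usage sketch assembled from the method signatures added in this diff; the spreadsheet endpoints are marked experimental, and the id attribute on the returned job object is an assumption not shown in this diff.

from llama_cloud import ApiKeyType, SpreadsheetParsingConfig
from llama_cloud.client import LlamaCloud

client = LlamaCloud(token="YOUR_TOKEN")

# API key management (new in 0.1.43)
keys = client.beta.list_api_keys(key_type=ApiKeyType.USER)
key = client.beta.create_api_key(name="my-key", key_type=ApiKeyType.USER)

# Spreadsheet parsing jobs (new in 0.1.43, experimental)
job = client.beta.create_spreadsheet_job(
    file_id="YOUR_FILE_ID",
    config=SpreadsheetParsingConfig(),
)
job = client.beta.get_spreadsheet_job(
    spreadsheet_job_id=job.id,  # assumes SpreadsheetJob exposes an id field
    include_results=True,
)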
@@ -12,9 +12,13 @@ from ...core.jsonable_encoder import jsonable_encoder
  from ...core.remove_none_from_dict import remove_none_from_dict
  from ...errors.unprocessable_entity_error import UnprocessableEntityError
  from ...types.agent_data import AgentData
+ from ...types.api_key import ApiKey
+ from ...types.api_key_query_response import ApiKeyQueryResponse
+ from ...types.api_key_type import ApiKeyType
  from ...types.batch import Batch
  from ...types.batch_paginated_list import BatchPaginatedList
  from ...types.batch_public_output import BatchPublicOutput
+ from ...types.delete_response import DeleteResponse
  from ...types.file import File
  from ...types.file_create import FileCreate
  from ...types.file_filter import FileFilter
@@ -25,10 +29,14 @@ from ...types.llama_parse_parameters import LlamaParseParameters
  from ...types.paginated_response_agent_data import PaginatedResponseAgentData
  from ...types.paginated_response_aggregate_group import PaginatedResponseAggregateGroup
  from ...types.paginated_response_quota_configuration import PaginatedResponseQuotaConfiguration
+ from ...types.paginated_response_spreadsheet_job import PaginatedResponseSpreadsheetJob
  from ...types.parse_configuration import ParseConfiguration
  from ...types.parse_configuration_create import ParseConfigurationCreate
  from ...types.parse_configuration_filter import ParseConfigurationFilter
  from ...types.parse_configuration_query_response import ParseConfigurationQueryResponse
+ from ...types.presigned_url import PresignedUrl
+ from ...types.spreadsheet_job import SpreadsheetJob
+ from ...types.spreadsheet_parsing_config import SpreadsheetParsingConfig
 
  try:
  import pydantic
@@ -46,6 +54,220 @@ class BetaClient:
  def __init__(self, *, client_wrapper: SyncClientWrapper):
  self._client_wrapper = client_wrapper
 
+ def list_api_keys(
+ self,
+ *,
+ page_size: typing.Optional[int] = None,
+ page_token: typing.Optional[str] = None,
+ name: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
+ key_type: typing.Optional[ApiKeyType] = None,
+ ) -> ApiKeyQueryResponse:
+ """
+ List API keys.
+
+ If project_id is provided, validates user has access to that project.
+ If project_id is not provided, scopes results to the current user.
+
+ Args:
+ user: Current user
+ db: Database session
+ page_size: Number of items per page
+ page_token: Token for pagination
+ name: Filter by API key name
+ project_id: Filter by project ID
+ key_type: Filter by key type
+
+ Returns:
+ Paginated response with API keys
+
+ Parameters:
+ - page_size: typing.Optional[int].
+
+ - page_token: typing.Optional[str].
+
+ - name: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
+
+ - key_type: typing.Optional[ApiKeyType].
+ ---
+ from llama_cloud import ApiKeyType
+ from llama_cloud.client import LlamaCloud
+
+ client = LlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ client.beta.list_api_keys(
+ key_type=ApiKeyType.USER,
+ )
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ "GET",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/api-keys"),
+ params=remove_none_from_dict(
+ {
+ "page_size": page_size,
+ "page_token": page_token,
+ "name": name,
+ "project_id": project_id,
+ "key_type": key_type,
+ }
+ ),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return pydantic.parse_obj_as(ApiKeyQueryResponse, _response.json()) # type: ignore
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ def create_api_key(
+ self,
+ *,
+ name: typing.Optional[str] = OMIT,
+ project_id: typing.Optional[str] = OMIT,
+ key_type: typing.Optional[ApiKeyType] = OMIT,
+ ) -> ApiKey:
+ """
+ Create a new API key.
+
+ If project_id is specified, validates user has admin permissions for that project.
+
+ Args:
+ api_key_create: API key creation data
+ user: Current user
+ db: Database session
+
+ Returns:
+ The created API key with the secret key visible in redacted_api_key field
+
+ Parameters:
+ - name: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
+
+ - key_type: typing.Optional[ApiKeyType].
+ ---
+ from llama_cloud import ApiKeyType
+ from llama_cloud.client import LlamaCloud
+
+ client = LlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ client.beta.create_api_key(
+ key_type=ApiKeyType.USER,
+ )
+ """
+ _request: typing.Dict[str, typing.Any] = {}
+ if name is not OMIT:
+ _request["name"] = name
+ if project_id is not OMIT:
+ _request["project_id"] = project_id
+ if key_type is not OMIT:
+ _request["key_type"] = key_type
+ _response = self._client_wrapper.httpx_client.request(
+ "POST",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/api-keys"),
+ json=jsonable_encoder(_request),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return pydantic.parse_obj_as(ApiKey, _response.json()) # type: ignore
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ def get_api_key(self, api_key_id: str) -> ApiKey:
+ """
+ Get an API key by ID.
+
+ Args:
+ api_key_id: The ID of the API key
+ user: Current user
+ db: Database session
+
+ Returns:
+ The API key
+
+ Parameters:
+ - api_key_id: str.
+ ---
+ from llama_cloud.client import LlamaCloud
+
+ client = LlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ client.beta.get_api_key(
+ api_key_id="string",
+ )
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ "GET",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/api-keys/{api_key_id}"),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return pydantic.parse_obj_as(ApiKey, _response.json()) # type: ignore
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ def delete_api_key(self, api_key_id: str) -> None:
+ """
+ Delete an API key.
+
+ If the API key belongs to a project, validates user has admin permissions for that project.
+ If the API key has no project, validates it belongs to the current user.
+
+ Args:
+ api_key_id: The ID of the API key to delete
+ user: Current user
+ db: Database session
+
+ Parameters:
+ - api_key_id: str.
+ ---
+ from llama_cloud.client import LlamaCloud
+
+ client = LlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ client.beta.delete_api_key(
+ api_key_id="string",
+ )
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ "DELETE",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/api-keys/{api_key_id}"),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
  def list_batches(
  self,
  *,
@@ -350,7 +572,7 @@ class BetaClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- agent_slug: str,
+ deployment_name: str,
  collection: typing.Optional[str] = OMIT,
  data: typing.Dict[str, typing.Any],
  ) -> AgentData:
@@ -362,7 +584,7 @@ class BetaClient:
 
  - organization_id: typing.Optional[str].
 
- - agent_slug: str.
+ - deployment_name: str.
 
  - collection: typing.Optional[str].
 
@@ -374,11 +596,11 @@ class BetaClient:
  token="YOUR_TOKEN",
  )
  client.beta.create_agent_data(
- agent_slug="string",
+ deployment_name="string",
  data={"string": {}},
  )
  """
- _request: typing.Dict[str, typing.Any] = {"agent_slug": agent_slug, "data": data}
+ _request: typing.Dict[str, typing.Any] = {"deployment_name": deployment_name, "data": data}
  if collection is not OMIT:
  _request["collection"] = collection
  _response = self._client_wrapper.httpx_client.request(
@@ -408,7 +630,7 @@ class BetaClient:
  page_token: typing.Optional[str] = OMIT,
  filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]] = OMIT,
  order_by: typing.Optional[str] = OMIT,
- agent_slug: str,
+ deployment_name: str,
  collection: typing.Optional[str] = OMIT,
  include_total: typing.Optional[bool] = OMIT,
  offset: typing.Optional[int] = OMIT,
@@ -429,7 +651,7 @@ class BetaClient:
 
  - order_by: typing.Optional[str].
 
- - agent_slug: str. The agent deployment's agent_slug to search within
+ - deployment_name: str. The agent deployment's name to search within
 
  - collection: typing.Optional[str]. The logical agent data collection to search within
 
@@ -443,10 +665,10 @@ class BetaClient:
  token="YOUR_TOKEN",
  )
  client.beta.search_agent_data_api_v_1_beta_agent_data_search_post(
- agent_slug="string",
+ deployment_name="string",
  )
  """
- _request: typing.Dict[str, typing.Any] = {"agent_slug": agent_slug}
+ _request: typing.Dict[str, typing.Any] = {"deployment_name": deployment_name}
  if page_size is not OMIT:
  _request["page_size"] = page_size
  if page_token is not OMIT:
@@ -488,7 +710,7 @@ class BetaClient:
  page_token: typing.Optional[str] = OMIT,
  filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]] = OMIT,
  order_by: typing.Optional[str] = OMIT,
- agent_slug: str,
+ deployment_name: str,
  collection: typing.Optional[str] = OMIT,
  group_by: typing.Optional[typing.List[str]] = OMIT,
  count: typing.Optional[bool] = OMIT,
@@ -511,7 +733,7 @@ class BetaClient:
 
  - order_by: typing.Optional[str].
 
- - agent_slug: str. The agent deployment's agent_slug to aggregate data for
+ - deployment_name: str. The agent deployment's name to aggregate data for
 
  - collection: typing.Optional[str]. The logical agent data collection to aggregate data for
 
@@ -529,10 +751,10 @@ class BetaClient:
  token="YOUR_TOKEN",
  )
  client.beta.aggregate_agent_data_api_v_1_beta_agent_data_aggregate_post(
- agent_slug="string",
+ deployment_name="string",
  )
  """
- _request: typing.Dict[str, typing.Any] = {"agent_slug": agent_slug}
+ _request: typing.Dict[str, typing.Any] = {"deployment_name": deployment_name}
  if page_size is not OMIT:
  _request["page_size"] = page_size
  if page_token is not OMIT:
@@ -569,6 +791,61 @@ class BetaClient:
  raise ApiError(status_code=_response.status_code, body=_response.text)
  raise ApiError(status_code=_response.status_code, body=_response_json)
 
+ def delete_agent_data_by_query_api_v_1_beta_agent_data_delete_post(
+ self,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ deployment_name: str,
+ collection: typing.Optional[str] = OMIT,
+ filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]] = OMIT,
+ ) -> DeleteResponse:
+ """
+ Bulk delete agent data by query (deployment_name, collection, optional filters).
+
+ Parameters:
+ - project_id: typing.Optional[str].
+
+ - organization_id: typing.Optional[str].
+
+ - deployment_name: str. The agent deployment's name to delete data for
+
+ - collection: typing.Optional[str]. The logical agent data collection to delete from
+
+ - filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]].
+ ---
+ from llama_cloud.client import LlamaCloud
+
+ client = LlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ client.beta.delete_agent_data_by_query_api_v_1_beta_agent_data_delete_post(
+ deployment_name="string",
+ )
+ """
+ _request: typing.Dict[str, typing.Any] = {"deployment_name": deployment_name}
+ if collection is not OMIT:
+ _request["collection"] = collection
+ if filter is not OMIT:
+ _request["filter"] = filter
+ _response = self._client_wrapper.httpx_client.request(
+ "POST",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:delete"),
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+ json=jsonable_encoder(_request),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return pydantic.parse_obj_as(DeleteResponse, _response.json()) # type: ignore
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
  def list_quota_configurations(
  self,
  *,
@@ -1352,47 +1629,54 @@ class BetaClient:
  raise ApiError(status_code=_response.status_code, body=_response.text)
  raise ApiError(status_code=_response.status_code, body=_response_json)
 
-
- class AsyncBetaClient:
- def __init__(self, *, client_wrapper: AsyncClientWrapper):
- self._client_wrapper = client_wrapper
-
- async def list_batches(
+ def list_spreadsheet_jobs(
  self,
  *,
- limit: typing.Optional[int] = None,
- offset: typing.Optional[int] = None,
+ include_results: typing.Optional[bool] = None,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- ) -> BatchPaginatedList:
+ page_size: typing.Optional[int] = None,
+ page_token: typing.Optional[str] = None,
+ ) -> PaginatedResponseSpreadsheetJob:
  """
- Parameters:
- - limit: typing.Optional[int].
+ List spreadsheet parsing jobs.
+ Experimental: This endpoint is not yet ready for production use and is subject to change at any time.
 
- - offset: typing.Optional[int].
+ Parameters:
+ - include_results: typing.Optional[bool].
 
  - project_id: typing.Optional[str].
 
  - organization_id: typing.Optional[str].
+
+ - page_size: typing.Optional[int].
+
+ - page_token: typing.Optional[str].
  ---
- from llama_cloud.client import AsyncLlamaCloud
+ from llama_cloud.client import LlamaCloud
 
- client = AsyncLlamaCloud(
+ client = LlamaCloud(
  token="YOUR_TOKEN",
  )
- await client.beta.list_batches()
+ client.beta.list_spreadsheet_jobs()
  """
- _response = await self._client_wrapper.httpx_client.request(
+ _response = self._client_wrapper.httpx_client.request(
  "GET",
- urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/batches"),
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/spreadsheet/jobs"),
  params=remove_none_from_dict(
- {"limit": limit, "offset": offset, "project_id": project_id, "organization_id": organization_id}
+ {
+ "include_results": include_results,
+ "project_id": project_id,
+ "organization_id": organization_id,
+ "page_size": page_size,
+ "page_token": page_token,
+ }
  ),
  headers=self._client_wrapper.get_headers(),
  timeout=60,
  )
  if 200 <= _response.status_code < 300:
- return pydantic.parse_obj_as(BatchPaginatedList, _response.json()) # type: ignore
+ return pydantic.parse_obj_as(PaginatedResponseSpreadsheetJob, _response.json()) # type: ignore
  if _response.status_code == 422:
  raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
  try:
@@ -1401,30 +1685,457 @@ class AsyncBetaClient:
  raise ApiError(status_code=_response.status_code, body=_response.text)
  raise ApiError(status_code=_response.status_code, body=_response_json)
 
- async def create_batch(
+ def create_spreadsheet_job(
  self,
  *,
- organization_id: typing.Optional[str] = None,
  project_id: typing.Optional[str] = None,
- tool: str,
- tool_data: typing.Optional[LlamaParseParameters] = OMIT,
- input_type: str,
- input_id: str,
- output_type: typing.Optional[str] = OMIT,
- output_id: typing.Optional[str] = OMIT,
- batch_create_project_id: str,
- external_id: str,
- completion_window: typing.Optional[int] = OMIT,
- ) -> Batch:
+ organization_id: typing.Optional[str] = None,
+ file_id: str,
+ config: typing.Optional[SpreadsheetParsingConfig] = OMIT,
+ ) -> SpreadsheetJob:
  """
- Parameters:
- - organization_id: typing.Optional[str].
+ Create a spreadsheet parsing job.
+ Experimental: This endpoint is not yet ready for production use and is subject to change at any time.
 
+ Parameters:
  - project_id: typing.Optional[str].
 
- - tool: str. The tool to be used for all requests in the batch.
+ - organization_id: typing.Optional[str].
 
- - tool_data: typing.Optional[LlamaParseParameters].
+ - file_id: str. The ID of the file to parse
+
+ - config: typing.Optional[SpreadsheetParsingConfig]. Configuration for the parsing job
+ ---
+ from llama_cloud import SpreadsheetParsingConfig
+ from llama_cloud.client import LlamaCloud
+
+ client = LlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ client.beta.create_spreadsheet_job(
+ file_id="string",
+ config=SpreadsheetParsingConfig(),
+ )
+ """
+ _request: typing.Dict[str, typing.Any] = {"file_id": file_id}
+ if config is not OMIT:
+ _request["config"] = config
+ _response = self._client_wrapper.httpx_client.request(
+ "POST",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/spreadsheet/jobs"),
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+ json=jsonable_encoder(_request),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return pydantic.parse_obj_as(SpreadsheetJob, _response.json()) # type: ignore
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ def get_spreadsheet_job(
+ self,
+ spreadsheet_job_id: str,
+ *,
+ include_results: typing.Optional[bool] = None,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ ) -> SpreadsheetJob:
+ """
+ Get a spreadsheet parsing job.
+
+ When include_results=True (default), the response will include extracted tables and results
+ if the job is complete, eliminating the need for a separate /results call.
+
+ Experimental: This endpoint is not yet ready for production use and is subject to change at any time.
+
+ Parameters:
+ - spreadsheet_job_id: str.
+
+ - include_results: typing.Optional[bool].
+
+ - project_id: typing.Optional[str].
+
+ - organization_id: typing.Optional[str].
+ ---
+ from llama_cloud.client import LlamaCloud
+
+ client = LlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ client.beta.get_spreadsheet_job(
+ spreadsheet_job_id="string",
+ )
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ "GET",
+ urllib.parse.urljoin(
+ f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/spreadsheet/jobs/{spreadsheet_job_id}"
+ ),
+ params=remove_none_from_dict(
+ {"include_results": include_results, "project_id": project_id, "organization_id": organization_id}
+ ),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return pydantic.parse_obj_as(SpreadsheetJob, _response.json()) # type: ignore
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ def get_table_download_presigned_url(
+ self,
+ spreadsheet_job_id: str,
+ table_id: int,
+ *,
+ expires_at_seconds: typing.Optional[int] = None,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ ) -> PresignedUrl:
+ """
+ Generate a presigned URL to download a specific extracted table.
+ Experimental: This endpoint is not yet ready for production use and is subject to change at any time.
+
+ Parameters:
+ - spreadsheet_job_id: str.
+
+ - table_id: int.
+
+ - expires_at_seconds: typing.Optional[int].
+
+ - project_id: typing.Optional[str].
+
+ - organization_id: typing.Optional[str].
+ ---
+ from llama_cloud.client import LlamaCloud
+
+ client = LlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ client.beta.get_table_download_presigned_url(
+ spreadsheet_job_id="string",
+ table_id=1,
+ )
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ "GET",
+ urllib.parse.urljoin(
+ f"{self._client_wrapper.get_base_url()}/",
+ f"api/v1/beta/spreadsheet/jobs/{spreadsheet_job_id}/tables/{table_id}/result",
+ ),
+ params=remove_none_from_dict(
+ {"expires_at_seconds": expires_at_seconds, "project_id": project_id, "organization_id": organization_id}
+ ),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return pydantic.parse_obj_as(PresignedUrl, _response.json()) # type: ignore
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+
+ class AsyncBetaClient:
+ def __init__(self, *, client_wrapper: AsyncClientWrapper):
+ self._client_wrapper = client_wrapper
+
+ async def list_api_keys(
+ self,
+ *,
+ page_size: typing.Optional[int] = None,
+ page_token: typing.Optional[str] = None,
+ name: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
+ key_type: typing.Optional[ApiKeyType] = None,
+ ) -> ApiKeyQueryResponse:
+ """
+ List API keys.
+
+ If project_id is provided, validates user has access to that project.
+ If project_id is not provided, scopes results to the current user.
+
+ Args:
+ user: Current user
+ db: Database session
+ page_size: Number of items per page
+ page_token: Token for pagination
+ name: Filter by API key name
+ project_id: Filter by project ID
+ key_type: Filter by key type
+
+ Returns:
+ Paginated response with API keys
+
+ Parameters:
+ - page_size: typing.Optional[int].
+
+ - page_token: typing.Optional[str].
+
+ - name: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
+
+ - key_type: typing.Optional[ApiKeyType].
+ ---
+ from llama_cloud import ApiKeyType
+ from llama_cloud.client import AsyncLlamaCloud
+
+ client = AsyncLlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ await client.beta.list_api_keys(
+ key_type=ApiKeyType.USER,
+ )
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ "GET",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/api-keys"),
+ params=remove_none_from_dict(
+ {
+ "page_size": page_size,
+ "page_token": page_token,
+ "name": name,
+ "project_id": project_id,
+ "key_type": key_type,
+ }
+ ),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return pydantic.parse_obj_as(ApiKeyQueryResponse, _response.json()) # type: ignore
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def create_api_key(
+ self,
+ *,
+ name: typing.Optional[str] = OMIT,
+ project_id: typing.Optional[str] = OMIT,
+ key_type: typing.Optional[ApiKeyType] = OMIT,
+ ) -> ApiKey:
+ """
+ Create a new API key.
+
+ If project_id is specified, validates user has admin permissions for that project.
+
+ Args:
+ api_key_create: API key creation data
+ user: Current user
+ db: Database session
+
+ Returns:
+ The created API key with the secret key visible in redacted_api_key field
+
+ Parameters:
+ - name: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
+
+ - key_type: typing.Optional[ApiKeyType].
+ ---
+ from llama_cloud import ApiKeyType
+ from llama_cloud.client import AsyncLlamaCloud
+
+ client = AsyncLlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ await client.beta.create_api_key(
+ key_type=ApiKeyType.USER,
+ )
+ """
+ _request: typing.Dict[str, typing.Any] = {}
+ if name is not OMIT:
+ _request["name"] = name
+ if project_id is not OMIT:
+ _request["project_id"] = project_id
+ if key_type is not OMIT:
+ _request["key_type"] = key_type
+ _response = await self._client_wrapper.httpx_client.request(
+ "POST",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/api-keys"),
+ json=jsonable_encoder(_request),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return pydantic.parse_obj_as(ApiKey, _response.json()) # type: ignore
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def get_api_key(self, api_key_id: str) -> ApiKey:
+ """
+ Get an API key by ID.
+
+ Args:
+ api_key_id: The ID of the API key
+ user: Current user
+ db: Database session
+
+ Returns:
+ The API key
+
+ Parameters:
+ - api_key_id: str.
+ ---
+ from llama_cloud.client import AsyncLlamaCloud
+
+ client = AsyncLlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ await client.beta.get_api_key(
+ api_key_id="string",
+ )
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ "GET",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/api-keys/{api_key_id}"),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return pydantic.parse_obj_as(ApiKey, _response.json()) # type: ignore
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def delete_api_key(self, api_key_id: str) -> None:
+ """
+ Delete an API key.
+
+ If the API key belongs to a project, validates user has admin permissions for that project.
+ If the API key has no project, validates it belongs to the current user.
+
+ Args:
+ api_key_id: The ID of the API key to delete
+ user: Current user
+ db: Database session
+
+ Parameters:
+ - api_key_id: str.
+ ---
+ from llama_cloud.client import AsyncLlamaCloud
+
+ client = AsyncLlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ await client.beta.delete_api_key(
+ api_key_id="string",
+ )
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ "DELETE",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/api-keys/{api_key_id}"),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def list_batches(
+ self,
+ *,
+ limit: typing.Optional[int] = None,
+ offset: typing.Optional[int] = None,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ ) -> BatchPaginatedList:
+ """
+ Parameters:
+ - limit: typing.Optional[int].
+
+ - offset: typing.Optional[int].
+
+ - project_id: typing.Optional[str].
+
+ - organization_id: typing.Optional[str].
+ ---
+ from llama_cloud.client import AsyncLlamaCloud
+
+ client = AsyncLlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ await client.beta.list_batches()
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ "GET",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/batches"),
+ params=remove_none_from_dict(
+ {"limit": limit, "offset": offset, "project_id": project_id, "organization_id": organization_id}
+ ),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return pydantic.parse_obj_as(BatchPaginatedList, _response.json()) # type: ignore
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def create_batch(
+ self,
+ *,
+ organization_id: typing.Optional[str] = None,
+ project_id: typing.Optional[str] = None,
+ tool: str,
+ tool_data: typing.Optional[LlamaParseParameters] = OMIT,
+ input_type: str,
+ input_id: str,
+ output_type: typing.Optional[str] = OMIT,
+ output_id: typing.Optional[str] = OMIT,
+ batch_create_project_id: str,
+ external_id: str,
+ completion_window: typing.Optional[int] = OMIT,
+ ) -> Batch:
+ """
+ Parameters:
+ - organization_id: typing.Optional[str].
+
+ - project_id: typing.Optional[str].
+
+ - tool: str. The tool to be used for all requests in the batch.
+
+ - tool_data: typing.Optional[LlamaParseParameters].
 
  - input_type: str. The type of input file. Currently only 'datasource' is supported.
 
@@ -1661,7 +2372,7 @@ class AsyncBetaClient:
  *,
  project_id: typing.Optional[str] = None,
  organization_id: typing.Optional[str] = None,
- agent_slug: str,
+ deployment_name: str,
  collection: typing.Optional[str] = OMIT,
  data: typing.Dict[str, typing.Any],
  ) -> AgentData:
@@ -1673,7 +2384,7 @@ class AsyncBetaClient:
 
  - organization_id: typing.Optional[str].
 
- - agent_slug: str.
+ - deployment_name: str.
 
  - collection: typing.Optional[str].
 
@@ -1685,11 +2396,11 @@ class AsyncBetaClient:
  token="YOUR_TOKEN",
  )
  await client.beta.create_agent_data(
- agent_slug="string",
+ deployment_name="string",
  data={"string": {}},
  )
  """
- _request: typing.Dict[str, typing.Any] = {"agent_slug": agent_slug, "data": data}
+ _request: typing.Dict[str, typing.Any] = {"deployment_name": deployment_name, "data": data}
  if collection is not OMIT:
  _request["collection"] = collection
  _response = await self._client_wrapper.httpx_client.request(
@@ -1719,7 +2430,7 @@ class AsyncBetaClient:
  page_token: typing.Optional[str] = OMIT,
  filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]] = OMIT,
  order_by: typing.Optional[str] = OMIT,
- agent_slug: str,
+ deployment_name: str,
  collection: typing.Optional[str] = OMIT,
  include_total: typing.Optional[bool] = OMIT,
  offset: typing.Optional[int] = OMIT,
@@ -1740,7 +2451,7 @@ class AsyncBetaClient:
 
  - order_by: typing.Optional[str].
 
- - agent_slug: str. The agent deployment's agent_slug to search within
+ - deployment_name: str. The agent deployment's name to search within
 
  - collection: typing.Optional[str]. The logical agent data collection to search within
 
@@ -1754,10 +2465,10 @@ class AsyncBetaClient:
  token="YOUR_TOKEN",
  )
  await client.beta.search_agent_data_api_v_1_beta_agent_data_search_post(
- agent_slug="string",
+ deployment_name="string",
  )
  """
- _request: typing.Dict[str, typing.Any] = {"agent_slug": agent_slug}
+ _request: typing.Dict[str, typing.Any] = {"deployment_name": deployment_name}
  if page_size is not OMIT:
  _request["page_size"] = page_size
  if page_token is not OMIT:
@@ -1799,7 +2510,7 @@ class AsyncBetaClient:
  page_token: typing.Optional[str] = OMIT,
  filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]] = OMIT,
  order_by: typing.Optional[str] = OMIT,
- agent_slug: str,
+ deployment_name: str,
  collection: typing.Optional[str] = OMIT,
  group_by: typing.Optional[typing.List[str]] = OMIT,
  count: typing.Optional[bool] = OMIT,
@@ -1822,7 +2533,7 @@ class AsyncBetaClient:
 
  - order_by: typing.Optional[str].
 
- - agent_slug: str. The agent deployment's agent_slug to aggregate data for
+ - deployment_name: str. The agent deployment's name to aggregate data for
 
  - collection: typing.Optional[str]. The logical agent data collection to aggregate data for
 
@@ -1840,10 +2551,10 @@ class AsyncBetaClient:
  token="YOUR_TOKEN",
  )
  await client.beta.aggregate_agent_data_api_v_1_beta_agent_data_aggregate_post(
- agent_slug="string",
+ deployment_name="string",
  )
  """
- _request: typing.Dict[str, typing.Any] = {"agent_slug": agent_slug}
+ _request: typing.Dict[str, typing.Any] = {"deployment_name": deployment_name}
  if page_size is not OMIT:
  _request["page_size"] = page_size
  if page_token is not OMIT:
@@ -1880,6 +2591,61 @@ class AsyncBetaClient:
  raise ApiError(status_code=_response.status_code, body=_response.text)
  raise ApiError(status_code=_response.status_code, body=_response_json)
 
+ async def delete_agent_data_by_query_api_v_1_beta_agent_data_delete_post(
+ self,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ deployment_name: str,
+ collection: typing.Optional[str] = OMIT,
+ filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]] = OMIT,
+ ) -> DeleteResponse:
+ """
+ Bulk delete agent data by query (deployment_name, collection, optional filters).
+
+ Parameters:
+ - project_id: typing.Optional[str].
+
+ - organization_id: typing.Optional[str].
+
+ - deployment_name: str. The agent deployment's name to delete data for
+
+ - collection: typing.Optional[str]. The logical agent data collection to delete from
+
+ - filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]].
+ ---
+ from llama_cloud.client import AsyncLlamaCloud
+
+ client = AsyncLlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ await client.beta.delete_agent_data_by_query_api_v_1_beta_agent_data_delete_post(
+ deployment_name="string",
+ )
+ """
+ _request: typing.Dict[str, typing.Any] = {"deployment_name": deployment_name}
+ if collection is not OMIT:
+ _request["collection"] = collection
+ if filter is not OMIT:
+ _request["filter"] = filter
+ _response = await self._client_wrapper.httpx_client.request(
+ "POST",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:delete"),
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+ json=jsonable_encoder(_request),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return pydantic.parse_obj_as(DeleteResponse, _response.json()) # type: ignore
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
  async def list_quota_configurations(
  self,
  *,
@@ -2662,3 +3428,223 @@ class AsyncBetaClient:
  except JSONDecodeError:
  raise ApiError(status_code=_response.status_code, body=_response.text)
  raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def list_spreadsheet_jobs(
+ self,
+ *,
+ include_results: typing.Optional[bool] = None,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ page_size: typing.Optional[int] = None,
+ page_token: typing.Optional[str] = None,
+ ) -> PaginatedResponseSpreadsheetJob:
+ """
+ List spreadsheet parsing jobs.
+ Experimental: This endpoint is not yet ready for production use and is subject to change at any time.
+
+ Parameters:
+ - include_results: typing.Optional[bool].
+
+ - project_id: typing.Optional[str].
+
+ - organization_id: typing.Optional[str].
+
+ - page_size: typing.Optional[int].
+
+ - page_token: typing.Optional[str].
+ ---
+ from llama_cloud.client import AsyncLlamaCloud
+
+ client = AsyncLlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ await client.beta.list_spreadsheet_jobs()
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ "GET",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/spreadsheet/jobs"),
+ params=remove_none_from_dict(
+ {
+ "include_results": include_results,
+ "project_id": project_id,
+ "organization_id": organization_id,
+ "page_size": page_size,
+ "page_token": page_token,
+ }
+ ),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return pydantic.parse_obj_as(PaginatedResponseSpreadsheetJob, _response.json()) # type: ignore
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def create_spreadsheet_job(
+ self,
+ *,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ file_id: str,
+ config: typing.Optional[SpreadsheetParsingConfig] = OMIT,
+ ) -> SpreadsheetJob:
+ """
+ Create a spreadsheet parsing job.
+ Experimental: This endpoint is not yet ready for production use and is subject to change at any time.
+
+ Parameters:
+ - project_id: typing.Optional[str].
+
+ - organization_id: typing.Optional[str].
+
+ - file_id: str. The ID of the file to parse
+
+ - config: typing.Optional[SpreadsheetParsingConfig]. Configuration for the parsing job
+ ---
+ from llama_cloud import SpreadsheetParsingConfig
+ from llama_cloud.client import AsyncLlamaCloud
+
+ client = AsyncLlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ await client.beta.create_spreadsheet_job(
+ file_id="string",
+ config=SpreadsheetParsingConfig(),
+ )
+ """
+ _request: typing.Dict[str, typing.Any] = {"file_id": file_id}
+ if config is not OMIT:
+ _request["config"] = config
+ _response = await self._client_wrapper.httpx_client.request(
+ "POST",
+ urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/spreadsheet/jobs"),
+ params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+ json=jsonable_encoder(_request),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return pydantic.parse_obj_as(SpreadsheetJob, _response.json()) # type: ignore
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def get_spreadsheet_job(
+ self,
+ spreadsheet_job_id: str,
+ *,
+ include_results: typing.Optional[bool] = None,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ ) -> SpreadsheetJob:
+ """
+ Get a spreadsheet parsing job.
+
+ When include_results=True (default), the response will include extracted tables and results
+ if the job is complete, eliminating the need for a separate /results call.
+
+ Experimental: This endpoint is not yet ready for production use and is subject to change at any time.
+
+ Parameters:
+ - spreadsheet_job_id: str.
+
+ - include_results: typing.Optional[bool].
+
+ - project_id: typing.Optional[str].
+
+ - organization_id: typing.Optional[str].
+ ---
+ from llama_cloud.client import AsyncLlamaCloud
+
+ client = AsyncLlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ await client.beta.get_spreadsheet_job(
+ spreadsheet_job_id="string",
+ )
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ "GET",
+ urllib.parse.urljoin(
+ f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/spreadsheet/jobs/{spreadsheet_job_id}"
+ ),
+ params=remove_none_from_dict(
+ {"include_results": include_results, "project_id": project_id, "organization_id": organization_id}
+ ),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return pydantic.parse_obj_as(SpreadsheetJob, _response.json()) # type: ignore
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)
+
+ async def get_table_download_presigned_url(
+ self,
+ spreadsheet_job_id: str,
+ table_id: int,
+ *,
+ expires_at_seconds: typing.Optional[int] = None,
+ project_id: typing.Optional[str] = None,
+ organization_id: typing.Optional[str] = None,
+ ) -> PresignedUrl:
+ """
+ Generate a presigned URL to download a specific extracted table.
+ Experimental: This endpoint is not yet ready for production use and is subject to change at any time.
+
+ Parameters:
+ - spreadsheet_job_id: str.
+
+ - table_id: int.
+
+ - expires_at_seconds: typing.Optional[int].
+
+ - project_id: typing.Optional[str].
+
+ - organization_id: typing.Optional[str].
+ ---
+ from llama_cloud.client import AsyncLlamaCloud
+
+ client = AsyncLlamaCloud(
+ token="YOUR_TOKEN",
+ )
+ await client.beta.get_table_download_presigned_url(
+ spreadsheet_job_id="string",
+ table_id=1,
+ )
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ "GET",
+ urllib.parse.urljoin(
+ f"{self._client_wrapper.get_base_url()}/",
+ f"api/v1/beta/spreadsheet/jobs/{spreadsheet_job_id}/tables/{table_id}/result",
+ ),
+ params=remove_none_from_dict(
+ {"expires_at_seconds": expires_at_seconds, "project_id": project_id, "organization_id": organization_id}
+ ),
+ headers=self._client_wrapper.get_headers(),
+ timeout=60,
+ )
+ if 200 <= _response.status_code < 300:
+ return pydantic.parse_obj_as(PresignedUrl, _response.json()) # type: ignore
+ if _response.status_code == 422:
+ raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+ try:
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, body=_response.text)
+ raise ApiError(status_code=_response.status_code, body=_response_json)