llama-cloud 0.1.40__py3-none-any.whl → 0.1.42__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of llama-cloud might be problematic. Click here for more details.
- llama_cloud/__init__.py +18 -72
- llama_cloud/client.py +2 -5
- llama_cloud/resources/__init__.py +0 -4
- llama_cloud/resources/alpha/client.py +14 -30
- llama_cloud/resources/beta/client.py +1791 -344
- llama_cloud/resources/llama_extract/client.py +48 -0
- llama_cloud/resources/organizations/client.py +18 -4
- llama_cloud/resources/parsing/client.py +56 -0
- llama_cloud/resources/pipelines/client.py +164 -0
- llama_cloud/types/__init__.py +18 -72
- llama_cloud/types/agent_data.py +1 -1
- llama_cloud/types/agent_deployment_summary.py +1 -2
- llama_cloud/types/{report_create_response.py → api_key.py} +14 -2
- llama_cloud/types/{edit_suggestion.py → api_key_query_response.py} +6 -6
- llama_cloud/types/api_key_type.py +17 -0
- llama_cloud/types/{src_app_schema_chat_chat_message.py → chat_message.py} +1 -1
- llama_cloud/types/extract_config.py +8 -2
- llama_cloud/types/extract_models.py +28 -28
- llama_cloud/types/legacy_parse_job_config.py +3 -0
- llama_cloud/types/llama_extract_mode_availability.py +4 -3
- llama_cloud/types/llama_extract_settings.py +1 -1
- llama_cloud/types/llama_parse_parameters.py +7 -0
- llama_cloud/types/organization.py +1 -0
- llama_cloud/types/{progress_event.py → parse_configuration.py} +12 -12
- llama_cloud/types/{llama_index_core_base_llms_types_chat_message.py → parse_configuration_create.py} +9 -7
- llama_cloud/types/{report_update_event.py → parse_configuration_filter.py} +8 -6
- llama_cloud/types/{report_state_event.py → parse_configuration_query_response.py} +6 -6
- llama_cloud/types/parse_job_config.py +7 -0
- llama_cloud/types/pipeline_create.py +1 -1
- llama_cloud/types/playground_session.py +2 -2
- llama_cloud/types/public_model_name.py +97 -0
- llama_cloud/types/quota_configuration_configuration_type.py +4 -0
- {llama_cloud-0.1.40.dist-info → llama_cloud-0.1.42.dist-info}/METADATA +1 -1
- {llama_cloud-0.1.40.dist-info → llama_cloud-0.1.42.dist-info}/RECORD +36 -58
- {llama_cloud-0.1.40.dist-info → llama_cloud-0.1.42.dist-info}/WHEEL +1 -1
- llama_cloud/resources/reports/__init__.py +0 -5
- llama_cloud/resources/reports/client.py +0 -1230
- llama_cloud/resources/reports/types/__init__.py +0 -7
- llama_cloud/resources/reports/types/update_report_plan_api_v_1_reports_report_id_plan_patch_request_action.py +0 -25
- llama_cloud/types/audio_block.py +0 -34
- llama_cloud/types/document_block.py +0 -35
- llama_cloud/types/edit_suggestion_blocks_item.py +0 -8
- llama_cloud/types/image_block.py +0 -35
- llama_cloud/types/llama_index_core_base_llms_types_chat_message_blocks_item.py +0 -56
- llama_cloud/types/paginated_report_response.py +0 -35
- llama_cloud/types/progress_event_status.py +0 -33
- llama_cloud/types/report.py +0 -33
- llama_cloud/types/report_block.py +0 -35
- llama_cloud/types/report_block_dependency.py +0 -29
- llama_cloud/types/report_event_item.py +0 -40
- llama_cloud/types/report_event_item_event_data.py +0 -45
- llama_cloud/types/report_event_type.py +0 -37
- llama_cloud/types/report_metadata.py +0 -43
- llama_cloud/types/report_plan.py +0 -36
- llama_cloud/types/report_plan_block.py +0 -36
- llama_cloud/types/report_query.py +0 -33
- llama_cloud/types/report_response.py +0 -41
- llama_cloud/types/report_state.py +0 -37
- llama_cloud/types/text_block.py +0 -31
- {llama_cloud-0.1.40.dist-info → llama_cloud-0.1.42.dist-info}/LICENSE +0 -0
|
@@ -12,6 +12,9 @@ from ...core.jsonable_encoder import jsonable_encoder
|
|
|
12
12
|
from ...core.remove_none_from_dict import remove_none_from_dict
|
|
13
13
|
from ...errors.unprocessable_entity_error import UnprocessableEntityError
|
|
14
14
|
from ...types.agent_data import AgentData
|
|
15
|
+
from ...types.api_key import ApiKey
|
|
16
|
+
from ...types.api_key_query_response import ApiKeyQueryResponse
|
|
17
|
+
from ...types.api_key_type import ApiKeyType
|
|
15
18
|
from ...types.batch import Batch
|
|
16
19
|
from ...types.batch_paginated_list import BatchPaginatedList
|
|
17
20
|
from ...types.batch_public_output import BatchPublicOutput
|
|
@@ -25,6 +28,10 @@ from ...types.llama_parse_parameters import LlamaParseParameters
|
|
|
25
28
|
from ...types.paginated_response_agent_data import PaginatedResponseAgentData
|
|
26
29
|
from ...types.paginated_response_aggregate_group import PaginatedResponseAggregateGroup
|
|
27
30
|
from ...types.paginated_response_quota_configuration import PaginatedResponseQuotaConfiguration
|
|
31
|
+
from ...types.parse_configuration import ParseConfiguration
|
|
32
|
+
from ...types.parse_configuration_create import ParseConfigurationCreate
|
|
33
|
+
from ...types.parse_configuration_filter import ParseConfigurationFilter
|
|
34
|
+
from ...types.parse_configuration_query_response import ParseConfigurationQueryResponse
|
|
28
35
|
|
|
29
36
|
try:
|
|
30
37
|
import pydantic
|
|
@@ -42,6 +49,220 @@ class BetaClient:
|
|
|
42
49
|
def __init__(self, *, client_wrapper: SyncClientWrapper):
|
|
43
50
|
self._client_wrapper = client_wrapper
|
|
44
51
|
|
|
52
|
+
def list_api_keys(
|
|
53
|
+
self,
|
|
54
|
+
*,
|
|
55
|
+
page_size: typing.Optional[int] = None,
|
|
56
|
+
page_token: typing.Optional[str] = None,
|
|
57
|
+
name: typing.Optional[str] = None,
|
|
58
|
+
project_id: typing.Optional[str] = None,
|
|
59
|
+
key_type: typing.Optional[ApiKeyType] = None,
|
|
60
|
+
) -> ApiKeyQueryResponse:
|
|
61
|
+
"""
|
|
62
|
+
List API keys.
|
|
63
|
+
|
|
64
|
+
If project_id is provided, validates user has access to that project.
|
|
65
|
+
If project_id is not provided, scopes results to the current user.
|
|
66
|
+
|
|
67
|
+
Args:
|
|
68
|
+
user: Current user
|
|
69
|
+
db: Database session
|
|
70
|
+
page_size: Number of items per page
|
|
71
|
+
page_token: Token for pagination
|
|
72
|
+
name: Filter by API key name
|
|
73
|
+
project_id: Filter by project ID
|
|
74
|
+
key_type: Filter by key type
|
|
75
|
+
|
|
76
|
+
Returns:
|
|
77
|
+
Paginated response with API keys
|
|
78
|
+
|
|
79
|
+
Parameters:
|
|
80
|
+
- page_size: typing.Optional[int].
|
|
81
|
+
|
|
82
|
+
- page_token: typing.Optional[str].
|
|
83
|
+
|
|
84
|
+
- name: typing.Optional[str].
|
|
85
|
+
|
|
86
|
+
- project_id: typing.Optional[str].
|
|
87
|
+
|
|
88
|
+
- key_type: typing.Optional[ApiKeyType].
|
|
89
|
+
---
|
|
90
|
+
from llama_cloud import ApiKeyType
|
|
91
|
+
from llama_cloud.client import LlamaCloud
|
|
92
|
+
|
|
93
|
+
client = LlamaCloud(
|
|
94
|
+
token="YOUR_TOKEN",
|
|
95
|
+
)
|
|
96
|
+
client.beta.list_api_keys(
|
|
97
|
+
key_type=ApiKeyType.USER,
|
|
98
|
+
)
|
|
99
|
+
"""
|
|
100
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
101
|
+
"GET",
|
|
102
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/api-keys"),
|
|
103
|
+
params=remove_none_from_dict(
|
|
104
|
+
{
|
|
105
|
+
"page_size": page_size,
|
|
106
|
+
"page_token": page_token,
|
|
107
|
+
"name": name,
|
|
108
|
+
"project_id": project_id,
|
|
109
|
+
"key_type": key_type,
|
|
110
|
+
}
|
|
111
|
+
),
|
|
112
|
+
headers=self._client_wrapper.get_headers(),
|
|
113
|
+
timeout=60,
|
|
114
|
+
)
|
|
115
|
+
if 200 <= _response.status_code < 300:
|
|
116
|
+
return pydantic.parse_obj_as(ApiKeyQueryResponse, _response.json()) # type: ignore
|
|
117
|
+
if _response.status_code == 422:
|
|
118
|
+
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
119
|
+
try:
|
|
120
|
+
_response_json = _response.json()
|
|
121
|
+
except JSONDecodeError:
|
|
122
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
123
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
124
|
+
|
|
125
|
+
def create_api_key(
|
|
126
|
+
self,
|
|
127
|
+
*,
|
|
128
|
+
name: typing.Optional[str] = OMIT,
|
|
129
|
+
project_id: typing.Optional[str] = OMIT,
|
|
130
|
+
key_type: typing.Optional[ApiKeyType] = OMIT,
|
|
131
|
+
) -> ApiKey:
|
|
132
|
+
"""
|
|
133
|
+
Create a new API key.
|
|
134
|
+
|
|
135
|
+
If project_id is specified, validates user has admin permissions for that project.
|
|
136
|
+
|
|
137
|
+
Args:
|
|
138
|
+
api_key_create: API key creation data
|
|
139
|
+
user: Current user
|
|
140
|
+
db: Database session
|
|
141
|
+
|
|
142
|
+
Returns:
|
|
143
|
+
The created API key with the secret key visible in redacted_api_key field
|
|
144
|
+
|
|
145
|
+
Parameters:
|
|
146
|
+
- name: typing.Optional[str].
|
|
147
|
+
|
|
148
|
+
- project_id: typing.Optional[str].
|
|
149
|
+
|
|
150
|
+
- key_type: typing.Optional[ApiKeyType].
|
|
151
|
+
---
|
|
152
|
+
from llama_cloud import ApiKeyType
|
|
153
|
+
from llama_cloud.client import LlamaCloud
|
|
154
|
+
|
|
155
|
+
client = LlamaCloud(
|
|
156
|
+
token="YOUR_TOKEN",
|
|
157
|
+
)
|
|
158
|
+
client.beta.create_api_key(
|
|
159
|
+
key_type=ApiKeyType.USER,
|
|
160
|
+
)
|
|
161
|
+
"""
|
|
162
|
+
_request: typing.Dict[str, typing.Any] = {}
|
|
163
|
+
if name is not OMIT:
|
|
164
|
+
_request["name"] = name
|
|
165
|
+
if project_id is not OMIT:
|
|
166
|
+
_request["project_id"] = project_id
|
|
167
|
+
if key_type is not OMIT:
|
|
168
|
+
_request["key_type"] = key_type
|
|
169
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
170
|
+
"POST",
|
|
171
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/api-keys"),
|
|
172
|
+
json=jsonable_encoder(_request),
|
|
173
|
+
headers=self._client_wrapper.get_headers(),
|
|
174
|
+
timeout=60,
|
|
175
|
+
)
|
|
176
|
+
if 200 <= _response.status_code < 300:
|
|
177
|
+
return pydantic.parse_obj_as(ApiKey, _response.json()) # type: ignore
|
|
178
|
+
if _response.status_code == 422:
|
|
179
|
+
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
180
|
+
try:
|
|
181
|
+
_response_json = _response.json()
|
|
182
|
+
except JSONDecodeError:
|
|
183
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
184
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
185
|
+
|
|
186
|
+
def get_api_key(self, api_key_id: str) -> ApiKey:
|
|
187
|
+
"""
|
|
188
|
+
Get an API key by ID.
|
|
189
|
+
|
|
190
|
+
Args:
|
|
191
|
+
api_key_id: The ID of the API key
|
|
192
|
+
user: Current user
|
|
193
|
+
db: Database session
|
|
194
|
+
|
|
195
|
+
Returns:
|
|
196
|
+
The API key
|
|
197
|
+
|
|
198
|
+
Parameters:
|
|
199
|
+
- api_key_id: str.
|
|
200
|
+
---
|
|
201
|
+
from llama_cloud.client import LlamaCloud
|
|
202
|
+
|
|
203
|
+
client = LlamaCloud(
|
|
204
|
+
token="YOUR_TOKEN",
|
|
205
|
+
)
|
|
206
|
+
client.beta.get_api_key(
|
|
207
|
+
api_key_id="string",
|
|
208
|
+
)
|
|
209
|
+
"""
|
|
210
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
211
|
+
"GET",
|
|
212
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/api-keys/{api_key_id}"),
|
|
213
|
+
headers=self._client_wrapper.get_headers(),
|
|
214
|
+
timeout=60,
|
|
215
|
+
)
|
|
216
|
+
if 200 <= _response.status_code < 300:
|
|
217
|
+
return pydantic.parse_obj_as(ApiKey, _response.json()) # type: ignore
|
|
218
|
+
if _response.status_code == 422:
|
|
219
|
+
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
220
|
+
try:
|
|
221
|
+
_response_json = _response.json()
|
|
222
|
+
except JSONDecodeError:
|
|
223
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
224
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
225
|
+
|
|
226
|
+
def delete_api_key(self, api_key_id: str) -> None:
|
|
227
|
+
"""
|
|
228
|
+
Delete an API key.
|
|
229
|
+
|
|
230
|
+
If the API key belongs to a project, validates user has admin permissions for that project.
|
|
231
|
+
If the API key has no project, validates it belongs to the current user.
|
|
232
|
+
|
|
233
|
+
Args:
|
|
234
|
+
api_key_id: The ID of the API key to delete
|
|
235
|
+
user: Current user
|
|
236
|
+
db: Database session
|
|
237
|
+
|
|
238
|
+
Parameters:
|
|
239
|
+
- api_key_id: str.
|
|
240
|
+
---
|
|
241
|
+
from llama_cloud.client import LlamaCloud
|
|
242
|
+
|
|
243
|
+
client = LlamaCloud(
|
|
244
|
+
token="YOUR_TOKEN",
|
|
245
|
+
)
|
|
246
|
+
client.beta.delete_api_key(
|
|
247
|
+
api_key_id="string",
|
|
248
|
+
)
|
|
249
|
+
"""
|
|
250
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
251
|
+
"DELETE",
|
|
252
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/api-keys/{api_key_id}"),
|
|
253
|
+
headers=self._client_wrapper.get_headers(),
|
|
254
|
+
timeout=60,
|
|
255
|
+
)
|
|
256
|
+
if 200 <= _response.status_code < 300:
|
|
257
|
+
return
|
|
258
|
+
if _response.status_code == 422:
|
|
259
|
+
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
260
|
+
try:
|
|
261
|
+
_response_json = _response.json()
|
|
262
|
+
except JSONDecodeError:
|
|
263
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
264
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
265
|
+
|
|
45
266
|
def list_batches(
|
|
46
267
|
self,
|
|
47
268
|
*,
|
|
@@ -346,7 +567,7 @@ class BetaClient:
|
|
|
346
567
|
*,
|
|
347
568
|
project_id: typing.Optional[str] = None,
|
|
348
569
|
organization_id: typing.Optional[str] = None,
|
|
349
|
-
|
|
570
|
+
deployment_name: str,
|
|
350
571
|
collection: typing.Optional[str] = OMIT,
|
|
351
572
|
data: typing.Dict[str, typing.Any],
|
|
352
573
|
) -> AgentData:
|
|
@@ -358,7 +579,7 @@ class BetaClient:
|
|
|
358
579
|
|
|
359
580
|
- organization_id: typing.Optional[str].
|
|
360
581
|
|
|
361
|
-
-
|
|
582
|
+
- deployment_name: str.
|
|
362
583
|
|
|
363
584
|
- collection: typing.Optional[str].
|
|
364
585
|
|
|
@@ -370,11 +591,11 @@ class BetaClient:
|
|
|
370
591
|
token="YOUR_TOKEN",
|
|
371
592
|
)
|
|
372
593
|
client.beta.create_agent_data(
|
|
373
|
-
|
|
594
|
+
deployment_name="string",
|
|
374
595
|
data={"string": {}},
|
|
375
596
|
)
|
|
376
597
|
"""
|
|
377
|
-
_request: typing.Dict[str, typing.Any] = {"
|
|
598
|
+
_request: typing.Dict[str, typing.Any] = {"deployment_name": deployment_name, "data": data}
|
|
378
599
|
if collection is not OMIT:
|
|
379
600
|
_request["collection"] = collection
|
|
380
601
|
_response = self._client_wrapper.httpx_client.request(
|
|
@@ -404,7 +625,7 @@ class BetaClient:
|
|
|
404
625
|
page_token: typing.Optional[str] = OMIT,
|
|
405
626
|
filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]] = OMIT,
|
|
406
627
|
order_by: typing.Optional[str] = OMIT,
|
|
407
|
-
|
|
628
|
+
deployment_name: str,
|
|
408
629
|
collection: typing.Optional[str] = OMIT,
|
|
409
630
|
include_total: typing.Optional[bool] = OMIT,
|
|
410
631
|
offset: typing.Optional[int] = OMIT,
|
|
@@ -425,7 +646,7 @@ class BetaClient:
|
|
|
425
646
|
|
|
426
647
|
- order_by: typing.Optional[str].
|
|
427
648
|
|
|
428
|
-
-
|
|
649
|
+
- deployment_name: str. The agent deployment's name to search within
|
|
429
650
|
|
|
430
651
|
- collection: typing.Optional[str]. The logical agent data collection to search within
|
|
431
652
|
|
|
@@ -439,10 +660,10 @@ class BetaClient:
|
|
|
439
660
|
token="YOUR_TOKEN",
|
|
440
661
|
)
|
|
441
662
|
client.beta.search_agent_data_api_v_1_beta_agent_data_search_post(
|
|
442
|
-
|
|
663
|
+
deployment_name="string",
|
|
443
664
|
)
|
|
444
665
|
"""
|
|
445
|
-
_request: typing.Dict[str, typing.Any] = {"
|
|
666
|
+
_request: typing.Dict[str, typing.Any] = {"deployment_name": deployment_name}
|
|
446
667
|
if page_size is not OMIT:
|
|
447
668
|
_request["page_size"] = page_size
|
|
448
669
|
if page_token is not OMIT:
|
|
@@ -484,7 +705,7 @@ class BetaClient:
|
|
|
484
705
|
page_token: typing.Optional[str] = OMIT,
|
|
485
706
|
filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]] = OMIT,
|
|
486
707
|
order_by: typing.Optional[str] = OMIT,
|
|
487
|
-
|
|
708
|
+
deployment_name: str,
|
|
488
709
|
collection: typing.Optional[str] = OMIT,
|
|
489
710
|
group_by: typing.Optional[typing.List[str]] = OMIT,
|
|
490
711
|
count: typing.Optional[bool] = OMIT,
|
|
@@ -507,7 +728,7 @@ class BetaClient:
|
|
|
507
728
|
|
|
508
729
|
- order_by: typing.Optional[str].
|
|
509
730
|
|
|
510
|
-
-
|
|
731
|
+
- deployment_name: str. The agent deployment's name to aggregate data for
|
|
511
732
|
|
|
512
733
|
- collection: typing.Optional[str]. The logical agent data collection to aggregate data for
|
|
513
734
|
|
|
@@ -525,10 +746,10 @@ class BetaClient:
|
|
|
525
746
|
token="YOUR_TOKEN",
|
|
526
747
|
)
|
|
527
748
|
client.beta.aggregate_agent_data_api_v_1_beta_agent_data_aggregate_post(
|
|
528
|
-
|
|
749
|
+
deployment_name="string",
|
|
529
750
|
)
|
|
530
751
|
"""
|
|
531
|
-
_request: typing.Dict[str, typing.Any] = {"
|
|
752
|
+
_request: typing.Dict[str, typing.Any] = {"deployment_name": deployment_name}
|
|
532
753
|
if page_size is not OMIT:
|
|
533
754
|
_request["page_size"] = page_size
|
|
534
755
|
if page_token is not OMIT:
|
|
@@ -842,47 +1063,74 @@ class BetaClient:
|
|
|
842
1063
|
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
843
1064
|
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
844
1065
|
|
|
845
|
-
|
|
846
|
-
class AsyncBetaClient:
|
|
847
|
-
def __init__(self, *, client_wrapper: AsyncClientWrapper):
|
|
848
|
-
self._client_wrapper = client_wrapper
|
|
849
|
-
|
|
850
|
-
async def list_batches(
|
|
1066
|
+
def list_parse_configurations(
|
|
851
1067
|
self,
|
|
852
1068
|
*,
|
|
853
|
-
|
|
854
|
-
|
|
1069
|
+
page_size: typing.Optional[int] = None,
|
|
1070
|
+
page_token: typing.Optional[str] = None,
|
|
1071
|
+
name: typing.Optional[str] = None,
|
|
1072
|
+
creator: typing.Optional[str] = None,
|
|
1073
|
+
version: typing.Optional[str] = None,
|
|
855
1074
|
project_id: typing.Optional[str] = None,
|
|
856
1075
|
organization_id: typing.Optional[str] = None,
|
|
857
|
-
) ->
|
|
1076
|
+
) -> ParseConfigurationQueryResponse:
|
|
858
1077
|
"""
|
|
1078
|
+
List parse configurations for the current project.
|
|
1079
|
+
|
|
1080
|
+
Args:
|
|
1081
|
+
project: Validated project from dependency
|
|
1082
|
+
user: Current user
|
|
1083
|
+
db: Database session
|
|
1084
|
+
page_size: Number of items per page
|
|
1085
|
+
page_token: Token for pagination
|
|
1086
|
+
name: Filter by configuration name
|
|
1087
|
+
creator: Filter by creator
|
|
1088
|
+
version: Filter by version
|
|
1089
|
+
|
|
1090
|
+
Returns:
|
|
1091
|
+
Paginated response with parse configurations
|
|
1092
|
+
|
|
859
1093
|
Parameters:
|
|
860
|
-
-
|
|
1094
|
+
- page_size: typing.Optional[int].
|
|
861
1095
|
|
|
862
|
-
-
|
|
1096
|
+
- page_token: typing.Optional[str].
|
|
1097
|
+
|
|
1098
|
+
- name: typing.Optional[str].
|
|
1099
|
+
|
|
1100
|
+
- creator: typing.Optional[str].
|
|
1101
|
+
|
|
1102
|
+
- version: typing.Optional[str].
|
|
863
1103
|
|
|
864
1104
|
- project_id: typing.Optional[str].
|
|
865
1105
|
|
|
866
1106
|
- organization_id: typing.Optional[str].
|
|
867
1107
|
---
|
|
868
|
-
from llama_cloud.client import
|
|
1108
|
+
from llama_cloud.client import LlamaCloud
|
|
869
1109
|
|
|
870
|
-
client =
|
|
1110
|
+
client = LlamaCloud(
|
|
871
1111
|
token="YOUR_TOKEN",
|
|
872
1112
|
)
|
|
873
|
-
|
|
1113
|
+
client.beta.list_parse_configurations()
|
|
874
1114
|
"""
|
|
875
|
-
_response =
|
|
1115
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
876
1116
|
"GET",
|
|
877
|
-
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/
|
|
1117
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations"),
|
|
878
1118
|
params=remove_none_from_dict(
|
|
879
|
-
{
|
|
1119
|
+
{
|
|
1120
|
+
"page_size": page_size,
|
|
1121
|
+
"page_token": page_token,
|
|
1122
|
+
"name": name,
|
|
1123
|
+
"creator": creator,
|
|
1124
|
+
"version": version,
|
|
1125
|
+
"project_id": project_id,
|
|
1126
|
+
"organization_id": organization_id,
|
|
1127
|
+
}
|
|
880
1128
|
),
|
|
881
1129
|
headers=self._client_wrapper.get_headers(),
|
|
882
1130
|
timeout=60,
|
|
883
1131
|
)
|
|
884
1132
|
if 200 <= _response.status_code < 300:
|
|
885
|
-
return pydantic.parse_obj_as(
|
|
1133
|
+
return pydantic.parse_obj_as(ParseConfigurationQueryResponse, _response.json()) # type: ignore
|
|
886
1134
|
if _response.status_code == 422:
|
|
887
1135
|
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
888
1136
|
try:
|
|
@@ -891,94 +1139,66 @@ class AsyncBetaClient:
|
|
|
891
1139
|
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
892
1140
|
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
893
1141
|
|
|
894
|
-
|
|
1142
|
+
def create_parse_configuration(
|
|
895
1143
|
self,
|
|
896
1144
|
*,
|
|
897
|
-
organization_id: typing.Optional[str] = None,
|
|
898
1145
|
project_id: typing.Optional[str] = None,
|
|
899
|
-
|
|
900
|
-
|
|
901
|
-
|
|
902
|
-
input_id: str,
|
|
903
|
-
output_type: typing.Optional[str] = OMIT,
|
|
904
|
-
output_id: typing.Optional[str] = OMIT,
|
|
905
|
-
batch_create_project_id: str,
|
|
906
|
-
external_id: str,
|
|
907
|
-
completion_window: typing.Optional[int] = OMIT,
|
|
908
|
-
) -> Batch:
|
|
1146
|
+
organization_id: typing.Optional[str] = None,
|
|
1147
|
+
request: ParseConfigurationCreate,
|
|
1148
|
+
) -> ParseConfiguration:
|
|
909
1149
|
"""
|
|
910
|
-
|
|
911
|
-
- organization_id: typing.Optional[str].
|
|
912
|
-
|
|
913
|
-
- project_id: typing.Optional[str].
|
|
914
|
-
|
|
915
|
-
- tool: str. The tool to be used for all requests in the batch.
|
|
916
|
-
|
|
917
|
-
- tool_data: typing.Optional[LlamaParseParameters].
|
|
918
|
-
|
|
919
|
-
- input_type: str. The type of input file. Currently only 'datasource' is supported.
|
|
920
|
-
|
|
921
|
-
- input_id: str. The ID of the input file for the batch.
|
|
1150
|
+
Create a new parse configuration.
|
|
922
1151
|
|
|
923
|
-
|
|
1152
|
+
Args:
|
|
1153
|
+
config_create: Parse configuration creation data
|
|
1154
|
+
project: Validated project from dependency
|
|
1155
|
+
user: Current user
|
|
1156
|
+
db: Database session
|
|
924
1157
|
|
|
925
|
-
|
|
1158
|
+
Returns:
|
|
1159
|
+
The created parse configuration
|
|
926
1160
|
|
|
927
|
-
|
|
1161
|
+
Parameters:
|
|
1162
|
+
- project_id: typing.Optional[str].
|
|
928
1163
|
|
|
929
|
-
-
|
|
1164
|
+
- organization_id: typing.Optional[str].
|
|
930
1165
|
|
|
931
|
-
-
|
|
1166
|
+
- request: ParseConfigurationCreate.
|
|
932
1167
|
---
|
|
933
1168
|
from llama_cloud import (
|
|
934
1169
|
FailPageMode,
|
|
935
1170
|
LlamaParseParameters,
|
|
936
1171
|
LlamaParseParametersPriority,
|
|
1172
|
+
ParseConfigurationCreate,
|
|
937
1173
|
ParsingMode,
|
|
938
1174
|
)
|
|
939
|
-
from llama_cloud.client import
|
|
1175
|
+
from llama_cloud.client import LlamaCloud
|
|
940
1176
|
|
|
941
|
-
client =
|
|
1177
|
+
client = LlamaCloud(
|
|
942
1178
|
token="YOUR_TOKEN",
|
|
943
1179
|
)
|
|
944
|
-
|
|
945
|
-
|
|
946
|
-
|
|
947
|
-
|
|
948
|
-
|
|
949
|
-
|
|
1180
|
+
client.beta.create_parse_configuration(
|
|
1181
|
+
request=ParseConfigurationCreate(
|
|
1182
|
+
name="string",
|
|
1183
|
+
version="string",
|
|
1184
|
+
parameters=LlamaParseParameters(
|
|
1185
|
+
priority=LlamaParseParametersPriority.LOW,
|
|
1186
|
+
parse_mode=ParsingMode.PARSE_PAGE_WITHOUT_LLM,
|
|
1187
|
+
replace_failed_page_mode=FailPageMode.RAW_TEXT,
|
|
1188
|
+
),
|
|
950
1189
|
),
|
|
951
|
-
input_type="string",
|
|
952
|
-
input_id="string",
|
|
953
|
-
batch_create_project_id="string",
|
|
954
|
-
external_id="string",
|
|
955
1190
|
)
|
|
956
1191
|
"""
|
|
957
|
-
|
|
958
|
-
"tool": tool,
|
|
959
|
-
"input_type": input_type,
|
|
960
|
-
"input_id": input_id,
|
|
961
|
-
"project_id": batch_create_project_id,
|
|
962
|
-
"external_id": external_id,
|
|
963
|
-
}
|
|
964
|
-
if tool_data is not OMIT:
|
|
965
|
-
_request["tool_data"] = tool_data
|
|
966
|
-
if output_type is not OMIT:
|
|
967
|
-
_request["output_type"] = output_type
|
|
968
|
-
if output_id is not OMIT:
|
|
969
|
-
_request["output_id"] = output_id
|
|
970
|
-
if completion_window is not OMIT:
|
|
971
|
-
_request["completion_window"] = completion_window
|
|
972
|
-
_response = await self._client_wrapper.httpx_client.request(
|
|
1192
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
973
1193
|
"POST",
|
|
974
|
-
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/
|
|
975
|
-
params=remove_none_from_dict({"
|
|
976
|
-
json=jsonable_encoder(
|
|
1194
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations"),
|
|
1195
|
+
params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
|
|
1196
|
+
json=jsonable_encoder(request),
|
|
977
1197
|
headers=self._client_wrapper.get_headers(),
|
|
978
1198
|
timeout=60,
|
|
979
1199
|
)
|
|
980
1200
|
if 200 <= _response.status_code < 300:
|
|
981
|
-
return pydantic.parse_obj_as(
|
|
1201
|
+
return pydantic.parse_obj_as(ParseConfiguration, _response.json()) # type: ignore
|
|
982
1202
|
if _response.status_code == 422:
|
|
983
1203
|
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
984
1204
|
try:
|
|
@@ -987,31 +1207,1151 @@ class AsyncBetaClient:
|
|
|
987
1207
|
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
988
1208
|
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
989
1209
|
|
|
990
|
-
|
|
1210
|
+
def upsert_parse_configuration(
|
|
1211
|
+
self,
|
|
1212
|
+
*,
|
|
1213
|
+
project_id: typing.Optional[str] = None,
|
|
1214
|
+
organization_id: typing.Optional[str] = None,
|
|
1215
|
+
request: ParseConfigurationCreate,
|
|
1216
|
+
) -> ParseConfiguration:
|
|
991
1217
|
"""
|
|
1218
|
+
Create or update a parse configuration by name.
|
|
1219
|
+
|
|
1220
|
+
Args:
|
|
1221
|
+
config_create: Parse configuration creation data
|
|
1222
|
+
project: Validated project from dependency
|
|
1223
|
+
user: Current user
|
|
1224
|
+
db: Database session
|
|
1225
|
+
|
|
1226
|
+
Returns:
|
|
1227
|
+
The created or updated parse configuration
|
|
1228
|
+
|
|
1229
|
+
Parameters:
|
|
1230
|
+
- project_id: typing.Optional[str].
|
|
1231
|
+
|
|
1232
|
+
- organization_id: typing.Optional[str].
|
|
1233
|
+
|
|
1234
|
+
- request: ParseConfigurationCreate.
|
|
1235
|
+
---
|
|
1236
|
+
from llama_cloud import (
|
|
1237
|
+
FailPageMode,
|
|
1238
|
+
LlamaParseParameters,
|
|
1239
|
+
LlamaParseParametersPriority,
|
|
1240
|
+
ParseConfigurationCreate,
|
|
1241
|
+
ParsingMode,
|
|
1242
|
+
)
|
|
1243
|
+
from llama_cloud.client import LlamaCloud
|
|
1244
|
+
|
|
1245
|
+
client = LlamaCloud(
|
|
1246
|
+
token="YOUR_TOKEN",
|
|
1247
|
+
)
|
|
1248
|
+
client.beta.upsert_parse_configuration(
|
|
1249
|
+
request=ParseConfigurationCreate(
|
|
1250
|
+
name="string",
|
|
1251
|
+
version="string",
|
|
1252
|
+
parameters=LlamaParseParameters(
|
|
1253
|
+
priority=LlamaParseParametersPriority.LOW,
|
|
1254
|
+
parse_mode=ParsingMode.PARSE_PAGE_WITHOUT_LLM,
|
|
1255
|
+
replace_failed_page_mode=FailPageMode.RAW_TEXT,
|
|
1256
|
+
),
|
|
1257
|
+
),
|
|
1258
|
+
)
|
|
1259
|
+
"""
|
|
1260
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1261
|
+
"PUT",
|
|
1262
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations"),
|
|
1263
|
+
params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
|
|
1264
|
+
json=jsonable_encoder(request),
|
|
1265
|
+
headers=self._client_wrapper.get_headers(),
|
|
1266
|
+
timeout=60,
|
|
1267
|
+
)
|
|
1268
|
+
if 200 <= _response.status_code < 300:
|
|
1269
|
+
return pydantic.parse_obj_as(ParseConfiguration, _response.json()) # type: ignore
|
|
1270
|
+
if _response.status_code == 422:
|
|
1271
|
+
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
1272
|
+
try:
|
|
1273
|
+
_response_json = _response.json()
|
|
1274
|
+
except JSONDecodeError:
|
|
1275
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1276
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1277
|
+
|
|
1278
|
+
def get_parse_configuration(
|
|
1279
|
+
self, config_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
|
|
1280
|
+
) -> ParseConfiguration:
|
|
1281
|
+
"""
|
|
1282
|
+
Get a parse configuration by ID.
|
|
1283
|
+
|
|
1284
|
+
Args:
|
|
1285
|
+
config_id: The ID of the parse configuration
|
|
1286
|
+
project: Validated project from dependency
|
|
1287
|
+
user: Current user
|
|
1288
|
+
db: Database session
|
|
1289
|
+
|
|
1290
|
+
Returns:
|
|
1291
|
+
The parse configuration
|
|
1292
|
+
|
|
1293
|
+
Parameters:
|
|
1294
|
+
- config_id: str.
|
|
1295
|
+
|
|
1296
|
+
- project_id: typing.Optional[str].
|
|
1297
|
+
|
|
1298
|
+
- organization_id: typing.Optional[str].
|
|
1299
|
+
---
|
|
1300
|
+
from llama_cloud.client import LlamaCloud
|
|
1301
|
+
|
|
1302
|
+
client = LlamaCloud(
|
|
1303
|
+
token="YOUR_TOKEN",
|
|
1304
|
+
)
|
|
1305
|
+
client.beta.get_parse_configuration(
|
|
1306
|
+
config_id="string",
|
|
1307
|
+
)
|
|
1308
|
+
"""
|
|
1309
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1310
|
+
"GET",
|
|
1311
|
+
urllib.parse.urljoin(
|
|
1312
|
+
f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/parse-configurations/{config_id}"
|
|
1313
|
+
),
|
|
1314
|
+
params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
|
|
1315
|
+
headers=self._client_wrapper.get_headers(),
|
|
1316
|
+
timeout=60,
|
|
1317
|
+
)
|
|
1318
|
+
if 200 <= _response.status_code < 300:
|
|
1319
|
+
return pydantic.parse_obj_as(ParseConfiguration, _response.json()) # type: ignore
|
|
1320
|
+
if _response.status_code == 422:
|
|
1321
|
+
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
1322
|
+
try:
|
|
1323
|
+
_response_json = _response.json()
|
|
1324
|
+
except JSONDecodeError:
|
|
1325
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1326
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1327
|
+
|
|
1328
|
+
def update_parse_configuration(
    self,
    config_id: str,
    *,
    project_id: typing.Optional[str] = None,
    organization_id: typing.Optional[str] = None,
    parameters: typing.Optional[LlamaParseParameters] = OMIT,
) -> ParseConfiguration:
    """Update an existing parse configuration via ``PUT /api/v1/beta/parse-configurations/{config_id}``.

    Parameters:
        - config_id: str. ID of the parse configuration to update.

        - project_id: typing.Optional[str]. Optional project scope, sent as a query parameter.

        - organization_id: typing.Optional[str]. Optional organization scope, sent as a query parameter.

        - parameters: typing.Optional[LlamaParseParameters]. New parse parameters; omitted from the
          request body entirely when left as OMIT.

    Returns the updated ParseConfiguration. Raises UnprocessableEntityError on HTTP 422 and
    ApiError for any other non-2xx status.
    """
    # Only include fields the caller explicitly supplied (OMIT is a sentinel, not None).
    _body: typing.Dict[str, typing.Any] = {}
    if parameters is not OMIT:
        _body["parameters"] = parameters
    _url = urllib.parse.urljoin(
        f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/parse-configurations/{config_id}"
    )
    _response = self._client_wrapper.httpx_client.request(
        "PUT",
        _url,
        params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
        json=jsonable_encoder(_body),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    _status = _response.status_code
    if 200 <= _status < 300:
        return pydantic.parse_obj_as(ParseConfiguration, _response.json())  # type: ignore
    if _status == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _payload = _response.json()
    except JSONDecodeError:
        # Body was not JSON; surface the raw text instead.
        raise ApiError(status_code=_status, body=_response.text)
    raise ApiError(status_code=_status, body=_payload)
|
|
1400
|
+
|
|
1401
|
+
def delete_parse_configuration(
    self, config_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
) -> None:
    """Delete a parse configuration via ``DELETE /api/v1/beta/parse-configurations/{config_id}``.

    Parameters:
        - config_id: str. ID of the parse configuration to delete.

        - project_id: typing.Optional[str]. Optional project scope (query parameter).

        - organization_id: typing.Optional[str]. Optional organization scope (query parameter).

    Returns None on success. Raises UnprocessableEntityError on HTTP 422 and ApiError otherwise.
    """
    _url = urllib.parse.urljoin(
        f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/parse-configurations/{config_id}"
    )
    _response = self._client_wrapper.httpx_client.request(
        "DELETE",
        _url,
        params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    _status = _response.status_code
    if 200 <= _status < 300:
        return
    if _status == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _payload = _response.json()
    except JSONDecodeError:
        raise ApiError(status_code=_status, body=_response.text)
    raise ApiError(status_code=_status, body=_payload)
|
|
1447
|
+
|
|
1448
|
+
def query_parse_configurations(
    self,
    *,
    project_id: typing.Optional[str] = None,
    organization_id: typing.Optional[str] = None,
    page_size: typing.Optional[int] = OMIT,
    page_token: typing.Optional[str] = OMIT,
    filter: typing.Optional[ParseConfigurationFilter] = OMIT,
    order_by: typing.Optional[str] = OMIT,
) -> ParseConfigurationQueryResponse:
    """Query parse configurations with filtering and pagination
    via ``POST /api/v1/beta/parse-configurations/query``.

    Parameters:
        - project_id: typing.Optional[str]. Optional project scope (query parameter).

        - organization_id: typing.Optional[str]. Optional organization scope (query parameter).

        - page_size: typing.Optional[int]. Number of items per page.

        - page_token: typing.Optional[str]. Opaque pagination token from a previous response.

        - filter: typing.Optional[ParseConfigurationFilter]. Filter criteria.

        - order_by: typing.Optional[str]. Sort expression.

    Returns a paginated ParseConfigurationQueryResponse. Raises UnprocessableEntityError on
    HTTP 422 and ApiError for any other non-2xx status.
    """
    # Build the body from only the explicitly supplied fields (OMIT is the sentinel).
    _body: typing.Dict[str, typing.Any] = {}
    for _key, _value in (
        ("page_size", page_size),
        ("page_token", page_token),
        ("filter", filter),
        ("order_by", order_by),
    ):
        if _value is not OMIT:
            _body[_key] = _value
    _response = self._client_wrapper.httpx_client.request(
        "POST",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations/query"),
        params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
        json=jsonable_encoder(_body),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    _status = _response.status_code
    if 200 <= _status < 300:
        return pydantic.parse_obj_as(ParseConfigurationQueryResponse, _response.json())  # type: ignore
    if _status == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _payload = _response.json()
    except JSONDecodeError:
        raise ApiError(status_code=_status, body=_response.text)
    raise ApiError(status_code=_status, body=_payload)
|
|
1519
|
+
|
|
1520
|
+
def get_latest_parse_configuration(
    self,
    *,
    creator: typing.Optional[str] = None,
    project_id: typing.Optional[str] = None,
    organization_id: typing.Optional[str] = None,
) -> typing.Optional[ParseConfiguration]:
    """Fetch the most recent parse configuration for the current project
    via ``GET /api/v1/beta/parse-configurations/latest``.

    Parameters:
        - creator: typing.Optional[str]. Optional creator filter (query parameter).

        - project_id: typing.Optional[str]. Optional project scope (query parameter).

        - organization_id: typing.Optional[str]. Optional organization scope (query parameter).

    Returns the latest ParseConfiguration, or None when the server reports none exists.
    Raises UnprocessableEntityError on HTTP 422 and ApiError for any other non-2xx status.
    """
    _response = self._client_wrapper.httpx_client.request(
        "GET",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations/latest"),
        params=remove_none_from_dict(
            {"creator": creator, "project_id": project_id, "organization_id": organization_id}
        ),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    _status = _response.status_code
    if 200 <= _status < 300:
        return pydantic.parse_obj_as(typing.Optional[ParseConfiguration], _response.json())  # type: ignore
    if _status == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _payload = _response.json()
    except JSONDecodeError:
        raise ApiError(status_code=_status, body=_response.text)
    raise ApiError(status_code=_status, body=_payload)
|
|
1571
|
+
|
|
1572
|
+
|
|
1573
|
+
class AsyncBetaClient:
|
|
1574
|
+
def __init__(self, *, client_wrapper: AsyncClientWrapper):
    """Keep a reference to the async client wrapper used for all HTTP calls."""
    self._client_wrapper = client_wrapper
|
|
1576
|
+
|
|
1577
|
+
async def list_api_keys(
    self,
    *,
    page_size: typing.Optional[int] = None,
    page_token: typing.Optional[str] = None,
    name: typing.Optional[str] = None,
    project_id: typing.Optional[str] = None,
    key_type: typing.Optional[ApiKeyType] = None,
) -> ApiKeyQueryResponse:
    """List API keys via ``GET /api/v1/beta/api-keys``.

    When project_id is provided the server validates access to that project; otherwise
    results are scoped to the current user.

    Parameters:
        - page_size: typing.Optional[int]. Number of items per page.

        - page_token: typing.Optional[str]. Opaque pagination token.

        - name: typing.Optional[str]. Filter by API key name.

        - project_id: typing.Optional[str]. Filter by project ID.

        - key_type: typing.Optional[ApiKeyType]. Filter by key type.

    Returns a paginated ApiKeyQueryResponse. Raises UnprocessableEntityError on HTTP 422
    and ApiError for any other non-2xx status.
    """
    # None-valued filters are dropped so they are not serialized as "None" strings.
    _query = remove_none_from_dict(
        {
            "page_size": page_size,
            "page_token": page_token,
            "name": name,
            "project_id": project_id,
            "key_type": key_type,
        }
    )
    _response = await self._client_wrapper.httpx_client.request(
        "GET",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/api-keys"),
        params=_query,
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    _status = _response.status_code
    if 200 <= _status < 300:
        return pydantic.parse_obj_as(ApiKeyQueryResponse, _response.json())  # type: ignore
    if _status == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _payload = _response.json()
    except JSONDecodeError:
        raise ApiError(status_code=_status, body=_response.text)
    raise ApiError(status_code=_status, body=_payload)
|
|
1649
|
+
|
|
1650
|
+
async def create_api_key(
    self,
    *,
    name: typing.Optional[str] = OMIT,
    project_id: typing.Optional[str] = OMIT,
    key_type: typing.Optional[ApiKeyType] = OMIT,
) -> ApiKey:
    """Create a new API key via ``POST /api/v1/beta/api-keys``.

    When project_id is specified the server validates admin permissions for that project.

    Parameters:
        - name: typing.Optional[str]. Display name for the key.

        - project_id: typing.Optional[str]. Project to attach the key to.

        - key_type: typing.Optional[ApiKeyType]. Type of key to create.

    Returns the created ApiKey (the secret is visible in the redacted_api_key field on
    creation only). Raises UnprocessableEntityError on HTTP 422 and ApiError otherwise.
    """
    # Fields left as OMIT are excluded from the JSON body entirely.
    _body: typing.Dict[str, typing.Any] = {}
    for _key, _value in (("name", name), ("project_id", project_id), ("key_type", key_type)):
        if _value is not OMIT:
            _body[_key] = _value
    _response = await self._client_wrapper.httpx_client.request(
        "POST",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/api-keys"),
        json=jsonable_encoder(_body),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    _status = _response.status_code
    if 200 <= _status < 300:
        return pydantic.parse_obj_as(ApiKey, _response.json())  # type: ignore
    if _status == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _payload = _response.json()
    except JSONDecodeError:
        raise ApiError(status_code=_status, body=_response.text)
    raise ApiError(status_code=_status, body=_payload)
|
|
1710
|
+
|
|
1711
|
+
async def get_api_key(self, api_key_id: str) -> ApiKey:
    """Fetch a single API key via ``GET /api/v1/beta/api-keys/{api_key_id}``.

    Parameters:
        - api_key_id: str. ID of the API key to fetch.

    Returns the ApiKey. Raises UnprocessableEntityError on HTTP 422 and ApiError for any
    other non-2xx status.
    """
    _response = await self._client_wrapper.httpx_client.request(
        "GET",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/api-keys/{api_key_id}"),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    _status = _response.status_code
    if 200 <= _status < 300:
        return pydantic.parse_obj_as(ApiKey, _response.json())  # type: ignore
    if _status == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _payload = _response.json()
    except JSONDecodeError:
        raise ApiError(status_code=_status, body=_response.text)
    raise ApiError(status_code=_status, body=_payload)
|
|
1750
|
+
|
|
1751
|
+
async def delete_api_key(self, api_key_id: str) -> None:
    """Delete an API key via ``DELETE /api/v1/beta/api-keys/{api_key_id}``.

    The server validates admin permissions when the key belongs to a project, or ownership
    when it does not.

    Parameters:
        - api_key_id: str. ID of the API key to delete.

    Returns None on success. Raises UnprocessableEntityError on HTTP 422 and ApiError
    otherwise.
    """
    _response = await self._client_wrapper.httpx_client.request(
        "DELETE",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/api-keys/{api_key_id}"),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    _status = _response.status_code
    if 200 <= _status < 300:
        return
    if _status == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _payload = _response.json()
    except JSONDecodeError:
        raise ApiError(status_code=_status, body=_response.text)
    raise ApiError(status_code=_status, body=_payload)
|
|
1790
|
+
|
|
1791
|
+
async def list_batches(
    self,
    *,
    limit: typing.Optional[int] = None,
    offset: typing.Optional[int] = None,
    project_id: typing.Optional[str] = None,
    organization_id: typing.Optional[str] = None,
) -> BatchPaginatedList:
    """List batches via ``GET /api/v1/beta/batches``.

    Parameters:
        - limit: typing.Optional[int]. Maximum number of batches to return.

        - offset: typing.Optional[int]. Number of batches to skip.

        - project_id: typing.Optional[str]. Optional project scope (query parameter).

        - organization_id: typing.Optional[str]. Optional organization scope (query parameter).

    Returns a BatchPaginatedList. Raises UnprocessableEntityError on HTTP 422 and ApiError
    for any other non-2xx status.
    """
    _query = remove_none_from_dict(
        {"limit": limit, "offset": offset, "project_id": project_id, "organization_id": organization_id}
    )
    _response = await self._client_wrapper.httpx_client.request(
        "GET",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/batches"),
        params=_query,
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    _status = _response.status_code
    if 200 <= _status < 300:
        return pydantic.parse_obj_as(BatchPaginatedList, _response.json())  # type: ignore
    if _status == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _payload = _response.json()
    except JSONDecodeError:
        raise ApiError(status_code=_status, body=_response.text)
    raise ApiError(status_code=_status, body=_payload)
|
|
1834
|
+
|
|
1835
|
+
async def create_batch(
    self,
    *,
    organization_id: typing.Optional[str] = None,
    project_id: typing.Optional[str] = None,
    tool: str,
    tool_data: typing.Optional[LlamaParseParameters] = OMIT,
    input_type: str,
    input_id: str,
    output_type: typing.Optional[str] = OMIT,
    output_id: typing.Optional[str] = OMIT,
    batch_create_project_id: str,
    external_id: str,
    completion_window: typing.Optional[int] = OMIT,
) -> Batch:
    """Create a batch job via ``POST /api/v1/beta/batches``.

    Parameters:
        - organization_id: typing.Optional[str]. Optional organization scope (query parameter).

        - project_id: typing.Optional[str]. Optional project scope (query parameter).

        - tool: str. The tool to be used for all requests in the batch.

        - tool_data: typing.Optional[LlamaParseParameters]. Tool-specific configuration.

        - input_type: str. The type of input file. Currently only 'datasource' is supported.

        - input_id: str. The ID of the input file for the batch.

        - output_type: typing.Optional[str]. Optional output destination type.

        - output_id: typing.Optional[str]. Optional output destination ID.

        - batch_create_project_id: str. The ID of the project to which the batch belongs;
          serialized in the body as "project_id" (distinct from the query parameter).

        - external_id: str. A developer-provided ID for the batch, echoed in the response.

        - completion_window: typing.Optional[int]. Processing time frame; currently only 24h
          is supported.

    Returns the created Batch. Raises UnprocessableEntityError on HTTP 422 and ApiError
    for any other non-2xx status.
    """
    _body: typing.Dict[str, typing.Any] = {
        "tool": tool,
        "input_type": input_type,
        "input_id": input_id,
        # NOTE: the body's "project_id" is the batch's owning project, not the query-scope one.
        "project_id": batch_create_project_id,
        "external_id": external_id,
    }
    for _key, _value in (
        ("tool_data", tool_data),
        ("output_type", output_type),
        ("output_id", output_id),
        ("completion_window", completion_window),
    ):
        if _value is not OMIT:
            _body[_key] = _value
    _response = await self._client_wrapper.httpx_client.request(
        "POST",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/batches"),
        params=remove_none_from_dict({"organization_id": organization_id, "project_id": project_id}),
        json=jsonable_encoder(_body),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    _status = _response.status_code
    if 200 <= _status < 300:
        return pydantic.parse_obj_as(Batch, _response.json())  # type: ignore
    if _status == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _payload = _response.json()
    except JSONDecodeError:
        raise ApiError(status_code=_status, body=_response.text)
    raise ApiError(status_code=_status, body=_payload)
|
|
1930
|
+
|
|
1931
|
+
async def get_batch(self, batch_id: str, *, organization_id: typing.Optional[str] = None) -> BatchPublicOutput:
    """Fetch a batch via ``GET /api/v1/beta/batches/{batch_id}``.

    Parameters:
        - batch_id: str. ID of the batch to fetch.

        - organization_id: typing.Optional[str]. Optional organization scope (query parameter).

    Returns a BatchPublicOutput. Raises UnprocessableEntityError on HTTP 422 and ApiError
    for any other non-2xx status.
    """
    _response = await self._client_wrapper.httpx_client.request(
        "GET",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/batches/{batch_id}"),
        params=remove_none_from_dict({"organization_id": organization_id}),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    _status = _response.status_code
    if 200 <= _status < 300:
        return pydantic.parse_obj_as(BatchPublicOutput, _response.json())  # type: ignore
    if _status == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _payload = _response.json()
    except JSONDecodeError:
        raise ApiError(status_code=_status, body=_response.text)
    raise ApiError(status_code=_status, body=_payload)
|
|
1963
|
+
|
|
1964
|
+
async def get_agent_data(
    self, item_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
) -> AgentData:
    """Fetch agent data by ID via ``GET /api/v1/beta/agent-data/{item_id}``.

    Parameters:
        - item_id: str. ID of the agent-data item.

        - project_id: typing.Optional[str]. Optional project scope (query parameter).

        - organization_id: typing.Optional[str]. Optional organization scope (query parameter).

    Returns the AgentData record. Raises UnprocessableEntityError on HTTP 422 and ApiError
    for any other non-2xx status.
    """
    _response = await self._client_wrapper.httpx_client.request(
        "GET",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
        params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    _status = _response.status_code
    if 200 <= _status < 300:
        return pydantic.parse_obj_as(AgentData, _response.json())  # type: ignore
    if _status == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _payload = _response.json()
    except JSONDecodeError:
        raise ApiError(status_code=_status, body=_response.text)
    raise ApiError(status_code=_status, body=_payload)
|
|
2002
|
+
|
|
2003
|
+
async def update_agent_data(
    self,
    item_id: str,
    *,
    project_id: typing.Optional[str] = None,
    organization_id: typing.Optional[str] = None,
    data: typing.Dict[str, typing.Any],
) -> AgentData:
    """Overwrite agent data by ID via ``PUT /api/v1/beta/agent-data/{item_id}``.

    Parameters:
        - item_id: str. ID of the agent-data item to overwrite.

        - project_id: typing.Optional[str]. Optional project scope (query parameter).

        - organization_id: typing.Optional[str]. Optional organization scope (query parameter).

        - data: typing.Dict[str, typing.Any]. Replacement payload; sent as the "data" body field.

    Returns the updated AgentData. Raises UnprocessableEntityError on HTTP 422 and ApiError
    for any other non-2xx status.
    """
    _response = await self._client_wrapper.httpx_client.request(
        "PUT",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
        params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
        json=jsonable_encoder({"data": data}),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    _status = _response.status_code
    if 200 <= _status < 300:
        return pydantic.parse_obj_as(AgentData, _response.json())  # type: ignore
    if _status == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _payload = _response.json()
    except JSONDecodeError:
        raise ApiError(status_code=_status, body=_response.text)
    raise ApiError(status_code=_status, body=_payload)
|
|
2050
|
+
|
|
2051
|
+
async def delete_agent_data(
    self, item_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
) -> typing.Dict[str, str]:
    """Delete agent data by ID via ``DELETE /api/v1/beta/agent-data/{item_id}``.

    Parameters:
        - item_id: str. ID of the agent-data item to delete.

        - project_id: typing.Optional[str]. Optional project scope (query parameter).

        - organization_id: typing.Optional[str]. Optional organization scope (query parameter).

    Returns the server's confirmation payload as a string-to-string mapping. Raises
    UnprocessableEntityError on HTTP 422 and ApiError for any other non-2xx status.
    """
    _response = await self._client_wrapper.httpx_client.request(
        "DELETE",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
        params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    _status = _response.status_code
    if 200 <= _status < 300:
        return pydantic.parse_obj_as(typing.Dict[str, str], _response.json())  # type: ignore
    if _status == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _payload = _response.json()
    except JSONDecodeError:
        raise ApiError(status_code=_status, body=_response.text)
    raise ApiError(status_code=_status, body=_payload)
|
|
2089
|
+
|
|
2090
|
+
async def create_agent_data(
    self,
    *,
    project_id: typing.Optional[str] = None,
    organization_id: typing.Optional[str] = None,
    deployment_name: str,
    collection: typing.Optional[str] = OMIT,
    data: typing.Dict[str, typing.Any],
) -> AgentData:
    """Create a new agent-data record via ``POST /api/v1/beta/agent-data``.

    Parameters:
        - project_id: typing.Optional[str]. Optional project scope (query parameter).

        - organization_id: typing.Optional[str]. Optional organization scope (query parameter).

        - deployment_name: str. Name of the deployment owning the record.

        - collection: typing.Optional[str]. Optional collection name; omitted from the body
          when left as OMIT.

        - data: typing.Dict[str, typing.Any]. Record payload.

    Returns the created AgentData. Raises UnprocessableEntityError on HTTP 422 and ApiError
    for any other non-2xx status.
    """
    _body: typing.Dict[str, typing.Any] = {"deployment_name": deployment_name, "data": data}
    if collection is not OMIT:
        _body["collection"] = collection
    _response = await self._client_wrapper.httpx_client.request(
        "POST",
        urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data"),
        params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
        json=jsonable_encoder(_body),
        headers=self._client_wrapper.get_headers(),
        timeout=60,
    )
    _status = _response.status_code
    if 200 <= _status < 300:
        return pydantic.parse_obj_as(AgentData, _response.json())  # type: ignore
    if _status == 422:
        raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
    try:
        _payload = _response.json()
    except JSONDecodeError:
        raise ApiError(status_code=_status, body=_response.text)
    raise ApiError(status_code=_status, body=_payload)
|
|
2143
|
+
|
|
2144
|
+
async def search_agent_data_api_v_1_beta_agent_data_search_post(
|
|
2145
|
+
self,
|
|
2146
|
+
*,
|
|
2147
|
+
project_id: typing.Optional[str] = None,
|
|
2148
|
+
organization_id: typing.Optional[str] = None,
|
|
2149
|
+
page_size: typing.Optional[int] = OMIT,
|
|
2150
|
+
page_token: typing.Optional[str] = OMIT,
|
|
2151
|
+
filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]] = OMIT,
|
|
2152
|
+
order_by: typing.Optional[str] = OMIT,
|
|
2153
|
+
deployment_name: str,
|
|
2154
|
+
collection: typing.Optional[str] = OMIT,
|
|
2155
|
+
include_total: typing.Optional[bool] = OMIT,
|
|
2156
|
+
offset: typing.Optional[int] = OMIT,
|
|
2157
|
+
) -> PaginatedResponseAgentData:
|
|
2158
|
+
"""
|
|
2159
|
+
Search agent data with filtering, sorting, and pagination.
|
|
2160
|
+
|
|
2161
|
+
Parameters:
|
|
2162
|
+
- project_id: typing.Optional[str].
|
|
2163
|
+
|
|
2164
|
+
- organization_id: typing.Optional[str].
|
|
2165
|
+
|
|
2166
|
+
- page_size: typing.Optional[int].
|
|
2167
|
+
|
|
2168
|
+
- page_token: typing.Optional[str].
|
|
2169
|
+
|
|
2170
|
+
- filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]].
|
|
2171
|
+
|
|
2172
|
+
- order_by: typing.Optional[str].
|
|
2173
|
+
|
|
2174
|
+
- deployment_name: str. The agent deployment's name to search within
|
|
2175
|
+
|
|
2176
|
+
- collection: typing.Optional[str]. The logical agent data collection to search within
|
|
2177
|
+
|
|
2178
|
+
- include_total: typing.Optional[bool]. Whether to include the total number of items in the response
|
|
2179
|
+
|
|
2180
|
+
- offset: typing.Optional[int].
|
|
2181
|
+
---
|
|
2182
|
+
from llama_cloud.client import AsyncLlamaCloud
|
|
2183
|
+
|
|
2184
|
+
client = AsyncLlamaCloud(
|
|
2185
|
+
token="YOUR_TOKEN",
|
|
2186
|
+
)
|
|
2187
|
+
await client.beta.search_agent_data_api_v_1_beta_agent_data_search_post(
|
|
2188
|
+
deployment_name="string",
|
|
2189
|
+
)
|
|
2190
|
+
"""
|
|
2191
|
+
_request: typing.Dict[str, typing.Any] = {"deployment_name": deployment_name}
|
|
2192
|
+
if page_size is not OMIT:
|
|
2193
|
+
_request["page_size"] = page_size
|
|
2194
|
+
if page_token is not OMIT:
|
|
2195
|
+
_request["page_token"] = page_token
|
|
2196
|
+
if filter is not OMIT:
|
|
2197
|
+
_request["filter"] = filter
|
|
2198
|
+
if order_by is not OMIT:
|
|
2199
|
+
_request["order_by"] = order_by
|
|
2200
|
+
if collection is not OMIT:
|
|
2201
|
+
_request["collection"] = collection
|
|
2202
|
+
if include_total is not OMIT:
|
|
2203
|
+
_request["include_total"] = include_total
|
|
2204
|
+
if offset is not OMIT:
|
|
2205
|
+
_request["offset"] = offset
|
|
2206
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
2207
|
+
"POST",
|
|
2208
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:search"),
|
|
2209
|
+
params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
|
|
2210
|
+
json=jsonable_encoder(_request),
|
|
2211
|
+
headers=self._client_wrapper.get_headers(),
|
|
2212
|
+
timeout=60,
|
|
2213
|
+
)
|
|
2214
|
+
if 200 <= _response.status_code < 300:
|
|
2215
|
+
return pydantic.parse_obj_as(PaginatedResponseAgentData, _response.json()) # type: ignore
|
|
2216
|
+
if _response.status_code == 422:
|
|
2217
|
+
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
2218
|
+
try:
|
|
2219
|
+
_response_json = _response.json()
|
|
2220
|
+
except JSONDecodeError:
|
|
2221
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
2222
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
2223
|
+
|
|
2224
|
+
async def aggregate_agent_data_api_v_1_beta_agent_data_aggregate_post(
|
|
2225
|
+
self,
|
|
2226
|
+
*,
|
|
2227
|
+
project_id: typing.Optional[str] = None,
|
|
2228
|
+
organization_id: typing.Optional[str] = None,
|
|
2229
|
+
page_size: typing.Optional[int] = OMIT,
|
|
2230
|
+
page_token: typing.Optional[str] = OMIT,
|
|
2231
|
+
filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]] = OMIT,
|
|
2232
|
+
order_by: typing.Optional[str] = OMIT,
|
|
2233
|
+
deployment_name: str,
|
|
2234
|
+
collection: typing.Optional[str] = OMIT,
|
|
2235
|
+
group_by: typing.Optional[typing.List[str]] = OMIT,
|
|
2236
|
+
count: typing.Optional[bool] = OMIT,
|
|
2237
|
+
first: typing.Optional[bool] = OMIT,
|
|
2238
|
+
offset: typing.Optional[int] = OMIT,
|
|
2239
|
+
) -> PaginatedResponseAggregateGroup:
|
|
2240
|
+
"""
|
|
2241
|
+
Aggregate agent data with grouping and optional counting/first item retrieval.
|
|
2242
|
+
|
|
2243
|
+
Parameters:
|
|
2244
|
+
- project_id: typing.Optional[str].
|
|
2245
|
+
|
|
2246
|
+
- organization_id: typing.Optional[str].
|
|
2247
|
+
|
|
2248
|
+
- page_size: typing.Optional[int].
|
|
2249
|
+
|
|
2250
|
+
- page_token: typing.Optional[str].
|
|
2251
|
+
|
|
2252
|
+
- filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]].
|
|
2253
|
+
|
|
2254
|
+
- order_by: typing.Optional[str].
|
|
2255
|
+
|
|
2256
|
+
- deployment_name: str. The agent deployment's name to aggregate data for
|
|
2257
|
+
|
|
2258
|
+
- collection: typing.Optional[str]. The logical agent data collection to aggregate data for
|
|
2259
|
+
|
|
2260
|
+
- group_by: typing.Optional[typing.List[str]].
|
|
2261
|
+
|
|
2262
|
+
- count: typing.Optional[bool].
|
|
2263
|
+
|
|
2264
|
+
- first: typing.Optional[bool].
|
|
2265
|
+
|
|
2266
|
+
- offset: typing.Optional[int].
|
|
2267
|
+
---
|
|
2268
|
+
from llama_cloud.client import AsyncLlamaCloud
|
|
2269
|
+
|
|
2270
|
+
client = AsyncLlamaCloud(
|
|
2271
|
+
token="YOUR_TOKEN",
|
|
2272
|
+
)
|
|
2273
|
+
await client.beta.aggregate_agent_data_api_v_1_beta_agent_data_aggregate_post(
|
|
2274
|
+
deployment_name="string",
|
|
2275
|
+
)
|
|
2276
|
+
"""
|
|
2277
|
+
_request: typing.Dict[str, typing.Any] = {"deployment_name": deployment_name}
|
|
2278
|
+
if page_size is not OMIT:
|
|
2279
|
+
_request["page_size"] = page_size
|
|
2280
|
+
if page_token is not OMIT:
|
|
2281
|
+
_request["page_token"] = page_token
|
|
2282
|
+
if filter is not OMIT:
|
|
2283
|
+
_request["filter"] = filter
|
|
2284
|
+
if order_by is not OMIT:
|
|
2285
|
+
_request["order_by"] = order_by
|
|
2286
|
+
if collection is not OMIT:
|
|
2287
|
+
_request["collection"] = collection
|
|
2288
|
+
if group_by is not OMIT:
|
|
2289
|
+
_request["group_by"] = group_by
|
|
2290
|
+
if count is not OMIT:
|
|
2291
|
+
_request["count"] = count
|
|
2292
|
+
if first is not OMIT:
|
|
2293
|
+
_request["first"] = first
|
|
2294
|
+
if offset is not OMIT:
|
|
2295
|
+
_request["offset"] = offset
|
|
2296
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
2297
|
+
"POST",
|
|
2298
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:aggregate"),
|
|
2299
|
+
params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
|
|
2300
|
+
json=jsonable_encoder(_request),
|
|
2301
|
+
headers=self._client_wrapper.get_headers(),
|
|
2302
|
+
timeout=60,
|
|
2303
|
+
)
|
|
2304
|
+
if 200 <= _response.status_code < 300:
|
|
2305
|
+
return pydantic.parse_obj_as(PaginatedResponseAggregateGroup, _response.json()) # type: ignore
|
|
2306
|
+
if _response.status_code == 422:
|
|
2307
|
+
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
2308
|
+
try:
|
|
2309
|
+
_response_json = _response.json()
|
|
2310
|
+
except JSONDecodeError:
|
|
2311
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
2312
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
2313
|
+
|
|
2314
|
+
async def list_quota_configurations(
|
|
2315
|
+
self,
|
|
2316
|
+
*,
|
|
2317
|
+
source_type: typing_extensions.Literal["organization"],
|
|
2318
|
+
source_id: str,
|
|
2319
|
+
page: typing.Optional[int] = None,
|
|
2320
|
+
page_size: typing.Optional[int] = None,
|
|
2321
|
+
) -> PaginatedResponseQuotaConfiguration:
|
|
2322
|
+
"""
|
|
2323
|
+
Retrieve a paginated list of quota configurations with optional filtering.
|
|
2324
|
+
|
|
992
2325
|
Parameters:
|
|
993
|
-
-
|
|
2326
|
+
- source_type: typing_extensions.Literal["organization"].
|
|
994
2327
|
|
|
995
|
-
-
|
|
2328
|
+
- source_id: str.
|
|
2329
|
+
|
|
2330
|
+
- page: typing.Optional[int].
|
|
2331
|
+
|
|
2332
|
+
- page_size: typing.Optional[int].
|
|
996
2333
|
---
|
|
997
2334
|
from llama_cloud.client import AsyncLlamaCloud
|
|
998
2335
|
|
|
999
2336
|
client = AsyncLlamaCloud(
|
|
1000
2337
|
token="YOUR_TOKEN",
|
|
1001
2338
|
)
|
|
1002
|
-
await client.beta.
|
|
1003
|
-
|
|
2339
|
+
await client.beta.list_quota_configurations(
|
|
2340
|
+
source_type="organization",
|
|
2341
|
+
source_id="string",
|
|
1004
2342
|
)
|
|
1005
2343
|
"""
|
|
1006
2344
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1007
2345
|
"GET",
|
|
1008
|
-
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/",
|
|
1009
|
-
params=remove_none_from_dict(
|
|
2346
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/quota-management"),
|
|
2347
|
+
params=remove_none_from_dict(
|
|
2348
|
+
{"source_type": source_type, "source_id": source_id, "page": page, "page_size": page_size}
|
|
2349
|
+
),
|
|
1010
2350
|
headers=self._client_wrapper.get_headers(),
|
|
1011
2351
|
timeout=60,
|
|
1012
2352
|
)
|
|
1013
2353
|
if 200 <= _response.status_code < 300:
|
|
1014
|
-
return pydantic.parse_obj_as(
|
|
2354
|
+
return pydantic.parse_obj_as(PaginatedResponseQuotaConfiguration, _response.json()) # type: ignore
|
|
1015
2355
|
if _response.status_code == 422:
|
|
1016
2356
|
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
1017
2357
|
try:
|
|
@@ -1020,37 +2360,53 @@ class AsyncBetaClient:
|
|
|
1020
2360
|
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1021
2361
|
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1022
2362
|
|
|
1023
|
-
async def
|
|
1024
|
-
self,
|
|
1025
|
-
|
|
2363
|
+
async def create_file(
|
|
2364
|
+
self,
|
|
2365
|
+
*,
|
|
2366
|
+
project_id: typing.Optional[str] = None,
|
|
2367
|
+
organization_id: typing.Optional[str] = None,
|
|
2368
|
+
request: FileCreate,
|
|
2369
|
+
) -> File:
|
|
1026
2370
|
"""
|
|
1027
|
-
|
|
2371
|
+
Create a new file in the project.
|
|
1028
2372
|
|
|
1029
|
-
|
|
1030
|
-
|
|
2373
|
+
Args:
|
|
2374
|
+
file_create: File creation data
|
|
2375
|
+
project: Validated project from dependency
|
|
2376
|
+
db: Database session
|
|
2377
|
+
|
|
2378
|
+
Returns:
|
|
2379
|
+
The created file
|
|
1031
2380
|
|
|
2381
|
+
Parameters:
|
|
1032
2382
|
- project_id: typing.Optional[str].
|
|
1033
2383
|
|
|
1034
2384
|
- organization_id: typing.Optional[str].
|
|
2385
|
+
|
|
2386
|
+
- request: FileCreate.
|
|
1035
2387
|
---
|
|
2388
|
+
from llama_cloud import FileCreate
|
|
1036
2389
|
from llama_cloud.client import AsyncLlamaCloud
|
|
1037
2390
|
|
|
1038
2391
|
client = AsyncLlamaCloud(
|
|
1039
2392
|
token="YOUR_TOKEN",
|
|
1040
2393
|
)
|
|
1041
|
-
await client.beta.
|
|
1042
|
-
|
|
2394
|
+
await client.beta.create_file(
|
|
2395
|
+
request=FileCreate(
|
|
2396
|
+
name="string",
|
|
2397
|
+
),
|
|
1043
2398
|
)
|
|
1044
2399
|
"""
|
|
1045
2400
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1046
|
-
"
|
|
1047
|
-
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/",
|
|
2401
|
+
"POST",
|
|
2402
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
|
|
1048
2403
|
params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
|
|
2404
|
+
json=jsonable_encoder(request),
|
|
1049
2405
|
headers=self._client_wrapper.get_headers(),
|
|
1050
2406
|
timeout=60,
|
|
1051
2407
|
)
|
|
1052
2408
|
if 200 <= _response.status_code < 300:
|
|
1053
|
-
return pydantic.parse_obj_as(
|
|
2409
|
+
return pydantic.parse_obj_as(File, _response.json()) # type: ignore
|
|
1054
2410
|
if _response.status_code == 422:
|
|
1055
2411
|
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
1056
2412
|
try:
|
|
@@ -1059,46 +2415,53 @@ class AsyncBetaClient:
|
|
|
1059
2415
|
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1060
2416
|
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1061
2417
|
|
|
1062
|
-
async def
|
|
2418
|
+
async def upsert_file(
|
|
1063
2419
|
self,
|
|
1064
|
-
item_id: str,
|
|
1065
2420
|
*,
|
|
1066
2421
|
project_id: typing.Optional[str] = None,
|
|
1067
2422
|
organization_id: typing.Optional[str] = None,
|
|
1068
|
-
|
|
1069
|
-
) ->
|
|
2423
|
+
request: FileCreate,
|
|
2424
|
+
) -> File:
|
|
1070
2425
|
"""
|
|
1071
|
-
|
|
2426
|
+
Upsert a file (create or update if exists) in the project.
|
|
1072
2427
|
|
|
1073
|
-
|
|
1074
|
-
|
|
2428
|
+
Args:
|
|
2429
|
+
file_create: File creation/update data
|
|
2430
|
+
project: Validated project from dependency
|
|
2431
|
+
db: Database session
|
|
2432
|
+
|
|
2433
|
+
Returns:
|
|
2434
|
+
The upserted file
|
|
1075
2435
|
|
|
2436
|
+
Parameters:
|
|
1076
2437
|
- project_id: typing.Optional[str].
|
|
1077
2438
|
|
|
1078
2439
|
- organization_id: typing.Optional[str].
|
|
1079
2440
|
|
|
1080
|
-
-
|
|
2441
|
+
- request: FileCreate.
|
|
1081
2442
|
---
|
|
2443
|
+
from llama_cloud import FileCreate
|
|
1082
2444
|
from llama_cloud.client import AsyncLlamaCloud
|
|
1083
2445
|
|
|
1084
2446
|
client = AsyncLlamaCloud(
|
|
1085
2447
|
token="YOUR_TOKEN",
|
|
1086
2448
|
)
|
|
1087
|
-
await client.beta.
|
|
1088
|
-
|
|
1089
|
-
|
|
2449
|
+
await client.beta.upsert_file(
|
|
2450
|
+
request=FileCreate(
|
|
2451
|
+
name="string",
|
|
2452
|
+
),
|
|
1090
2453
|
)
|
|
1091
2454
|
"""
|
|
1092
2455
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1093
2456
|
"PUT",
|
|
1094
|
-
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/",
|
|
2457
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
|
|
1095
2458
|
params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
|
|
1096
|
-
json=jsonable_encoder(
|
|
2459
|
+
json=jsonable_encoder(request),
|
|
1097
2460
|
headers=self._client_wrapper.get_headers(),
|
|
1098
2461
|
timeout=60,
|
|
1099
2462
|
)
|
|
1100
2463
|
if 200 <= _response.status_code < 300:
|
|
1101
|
-
return pydantic.parse_obj_as(
|
|
2464
|
+
return pydantic.parse_obj_as(File, _response.json()) # type: ignore
|
|
1102
2465
|
if _response.status_code == 422:
|
|
1103
2466
|
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
1104
2467
|
try:
|
|
@@ -1107,14 +2470,93 @@ class AsyncBetaClient:
|
|
|
1107
2470
|
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1108
2471
|
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1109
2472
|
|
|
1110
|
-
async def
|
|
1111
|
-
self,
|
|
1112
|
-
|
|
2473
|
+
async def query_files(
|
|
2474
|
+
self,
|
|
2475
|
+
*,
|
|
2476
|
+
project_id: typing.Optional[str] = None,
|
|
2477
|
+
organization_id: typing.Optional[str] = None,
|
|
2478
|
+
page_size: typing.Optional[int] = OMIT,
|
|
2479
|
+
page_token: typing.Optional[str] = OMIT,
|
|
2480
|
+
filter: typing.Optional[FileFilter] = OMIT,
|
|
2481
|
+
order_by: typing.Optional[str] = OMIT,
|
|
2482
|
+
) -> FileQueryResponse:
|
|
1113
2483
|
"""
|
|
1114
|
-
|
|
2484
|
+
Query files with flexible filtering and pagination.
|
|
2485
|
+
|
|
2486
|
+
Args:
|
|
2487
|
+
request: The query request with filters and pagination
|
|
2488
|
+
project: Validated project from dependency
|
|
2489
|
+
db: Database session
|
|
2490
|
+
|
|
2491
|
+
Returns:
|
|
2492
|
+
Paginated response with files
|
|
1115
2493
|
|
|
1116
2494
|
Parameters:
|
|
1117
|
-
-
|
|
2495
|
+
- project_id: typing.Optional[str].
|
|
2496
|
+
|
|
2497
|
+
- organization_id: typing.Optional[str].
|
|
2498
|
+
|
|
2499
|
+
- page_size: typing.Optional[int].
|
|
2500
|
+
|
|
2501
|
+
- page_token: typing.Optional[str].
|
|
2502
|
+
|
|
2503
|
+
- filter: typing.Optional[FileFilter].
|
|
2504
|
+
|
|
2505
|
+
- order_by: typing.Optional[str].
|
|
2506
|
+
---
|
|
2507
|
+
from llama_cloud import FileFilter
|
|
2508
|
+
from llama_cloud.client import AsyncLlamaCloud
|
|
2509
|
+
|
|
2510
|
+
client = AsyncLlamaCloud(
|
|
2511
|
+
token="YOUR_TOKEN",
|
|
2512
|
+
)
|
|
2513
|
+
await client.beta.query_files(
|
|
2514
|
+
filter=FileFilter(),
|
|
2515
|
+
)
|
|
2516
|
+
"""
|
|
2517
|
+
_request: typing.Dict[str, typing.Any] = {}
|
|
2518
|
+
if page_size is not OMIT:
|
|
2519
|
+
_request["page_size"] = page_size
|
|
2520
|
+
if page_token is not OMIT:
|
|
2521
|
+
_request["page_token"] = page_token
|
|
2522
|
+
if filter is not OMIT:
|
|
2523
|
+
_request["filter"] = filter
|
|
2524
|
+
if order_by is not OMIT:
|
|
2525
|
+
_request["order_by"] = order_by
|
|
2526
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
2527
|
+
"POST",
|
|
2528
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files/query"),
|
|
2529
|
+
params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
|
|
2530
|
+
json=jsonable_encoder(_request),
|
|
2531
|
+
headers=self._client_wrapper.get_headers(),
|
|
2532
|
+
timeout=60,
|
|
2533
|
+
)
|
|
2534
|
+
if 200 <= _response.status_code < 300:
|
|
2535
|
+
return pydantic.parse_obj_as(FileQueryResponse, _response.json()) # type: ignore
|
|
2536
|
+
if _response.status_code == 422:
|
|
2537
|
+
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
2538
|
+
try:
|
|
2539
|
+
_response_json = _response.json()
|
|
2540
|
+
except JSONDecodeError:
|
|
2541
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
2542
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
2543
|
+
|
|
2544
|
+
async def delete_file(
|
|
2545
|
+
self, file_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
|
|
2546
|
+
) -> None:
|
|
2547
|
+
"""
|
|
2548
|
+
Delete a single file from the project.
|
|
2549
|
+
|
|
2550
|
+
Args:
|
|
2551
|
+
file_id: The ID of the file to delete
|
|
2552
|
+
project: Validated project from dependency
|
|
2553
|
+
db: Database session
|
|
2554
|
+
|
|
2555
|
+
Returns:
|
|
2556
|
+
None (204 No Content on success)
|
|
2557
|
+
|
|
2558
|
+
Parameters:
|
|
2559
|
+
- file_id: str.
|
|
1118
2560
|
|
|
1119
2561
|
- project_id: typing.Optional[str].
|
|
1120
2562
|
|
|
@@ -1125,19 +2567,19 @@ class AsyncBetaClient:
|
|
|
1125
2567
|
client = AsyncLlamaCloud(
|
|
1126
2568
|
token="YOUR_TOKEN",
|
|
1127
2569
|
)
|
|
1128
|
-
await client.beta.
|
|
1129
|
-
|
|
2570
|
+
await client.beta.delete_file(
|
|
2571
|
+
file_id="string",
|
|
1130
2572
|
)
|
|
1131
2573
|
"""
|
|
1132
2574
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1133
2575
|
"DELETE",
|
|
1134
|
-
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/
|
|
2576
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/files/{file_id}"),
|
|
1135
2577
|
params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
|
|
1136
2578
|
headers=self._client_wrapper.get_headers(),
|
|
1137
2579
|
timeout=60,
|
|
1138
2580
|
)
|
|
1139
2581
|
if 200 <= _response.status_code < 300:
|
|
1140
|
-
return
|
|
2582
|
+
return
|
|
1141
2583
|
if _response.status_code == 422:
|
|
1142
2584
|
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
1143
2585
|
try:
|
|
@@ -1146,52 +2588,74 @@ class AsyncBetaClient:
|
|
|
1146
2588
|
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1147
2589
|
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1148
2590
|
|
|
1149
|
-
async def
|
|
2591
|
+
async def list_parse_configurations(
|
|
1150
2592
|
self,
|
|
1151
2593
|
*,
|
|
2594
|
+
page_size: typing.Optional[int] = None,
|
|
2595
|
+
page_token: typing.Optional[str] = None,
|
|
2596
|
+
name: typing.Optional[str] = None,
|
|
2597
|
+
creator: typing.Optional[str] = None,
|
|
2598
|
+
version: typing.Optional[str] = None,
|
|
1152
2599
|
project_id: typing.Optional[str] = None,
|
|
1153
2600
|
organization_id: typing.Optional[str] = None,
|
|
1154
|
-
|
|
1155
|
-
collection: typing.Optional[str] = OMIT,
|
|
1156
|
-
data: typing.Dict[str, typing.Any],
|
|
1157
|
-
) -> AgentData:
|
|
2601
|
+
) -> ParseConfigurationQueryResponse:
|
|
1158
2602
|
"""
|
|
1159
|
-
|
|
2603
|
+
List parse configurations for the current project.
|
|
2604
|
+
|
|
2605
|
+
Args:
|
|
2606
|
+
project: Validated project from dependency
|
|
2607
|
+
user: Current user
|
|
2608
|
+
db: Database session
|
|
2609
|
+
page_size: Number of items per page
|
|
2610
|
+
page_token: Token for pagination
|
|
2611
|
+
name: Filter by configuration name
|
|
2612
|
+
creator: Filter by creator
|
|
2613
|
+
version: Filter by version
|
|
2614
|
+
|
|
2615
|
+
Returns:
|
|
2616
|
+
Paginated response with parse configurations
|
|
1160
2617
|
|
|
1161
2618
|
Parameters:
|
|
1162
|
-
-
|
|
2619
|
+
- page_size: typing.Optional[int].
|
|
1163
2620
|
|
|
1164
|
-
-
|
|
2621
|
+
- page_token: typing.Optional[str].
|
|
1165
2622
|
|
|
1166
|
-
-
|
|
2623
|
+
- name: typing.Optional[str].
|
|
1167
2624
|
|
|
1168
|
-
-
|
|
2625
|
+
- creator: typing.Optional[str].
|
|
1169
2626
|
|
|
1170
|
-
-
|
|
2627
|
+
- version: typing.Optional[str].
|
|
2628
|
+
|
|
2629
|
+
- project_id: typing.Optional[str].
|
|
2630
|
+
|
|
2631
|
+
- organization_id: typing.Optional[str].
|
|
1171
2632
|
---
|
|
1172
2633
|
from llama_cloud.client import AsyncLlamaCloud
|
|
1173
2634
|
|
|
1174
2635
|
client = AsyncLlamaCloud(
|
|
1175
2636
|
token="YOUR_TOKEN",
|
|
1176
2637
|
)
|
|
1177
|
-
await client.beta.
|
|
1178
|
-
agent_slug="string",
|
|
1179
|
-
data={"string": {}},
|
|
1180
|
-
)
|
|
2638
|
+
await client.beta.list_parse_configurations()
|
|
1181
2639
|
"""
|
|
1182
|
-
_request: typing.Dict[str, typing.Any] = {"agent_slug": agent_slug, "data": data}
|
|
1183
|
-
if collection is not OMIT:
|
|
1184
|
-
_request["collection"] = collection
|
|
1185
2640
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1186
|
-
"
|
|
1187
|
-
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/
|
|
1188
|
-
params=remove_none_from_dict(
|
|
1189
|
-
|
|
2641
|
+
"GET",
|
|
2642
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations"),
|
|
2643
|
+
params=remove_none_from_dict(
|
|
2644
|
+
{
|
|
2645
|
+
"page_size": page_size,
|
|
2646
|
+
"page_token": page_token,
|
|
2647
|
+
"name": name,
|
|
2648
|
+
"creator": creator,
|
|
2649
|
+
"version": version,
|
|
2650
|
+
"project_id": project_id,
|
|
2651
|
+
"organization_id": organization_id,
|
|
2652
|
+
}
|
|
2653
|
+
),
|
|
1190
2654
|
headers=self._client_wrapper.get_headers(),
|
|
1191
2655
|
timeout=60,
|
|
1192
2656
|
)
|
|
1193
2657
|
if 200 <= _response.status_code < 300:
|
|
1194
|
-
return pydantic.parse_obj_as(
|
|
2658
|
+
return pydantic.parse_obj_as(ParseConfigurationQueryResponse, _response.json()) # type: ignore
|
|
1195
2659
|
if _response.status_code == 422:
|
|
1196
2660
|
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
1197
2661
|
try:
|
|
@@ -1200,78 +2664,66 @@ class AsyncBetaClient:
|
|
|
1200
2664
|
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1201
2665
|
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1202
2666
|
|
|
1203
|
-
async def
|
|
2667
|
+
async def create_parse_configuration(
|
|
1204
2668
|
self,
|
|
1205
2669
|
*,
|
|
1206
2670
|
project_id: typing.Optional[str] = None,
|
|
1207
2671
|
organization_id: typing.Optional[str] = None,
|
|
1208
|
-
|
|
1209
|
-
|
|
1210
|
-
filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]] = OMIT,
|
|
1211
|
-
order_by: typing.Optional[str] = OMIT,
|
|
1212
|
-
agent_slug: str,
|
|
1213
|
-
collection: typing.Optional[str] = OMIT,
|
|
1214
|
-
include_total: typing.Optional[bool] = OMIT,
|
|
1215
|
-
offset: typing.Optional[int] = OMIT,
|
|
1216
|
-
) -> PaginatedResponseAgentData:
|
|
2672
|
+
request: ParseConfigurationCreate,
|
|
2673
|
+
) -> ParseConfiguration:
|
|
1217
2674
|
"""
|
|
1218
|
-
|
|
2675
|
+
Create a new parse configuration.
|
|
2676
|
+
|
|
2677
|
+
Args:
|
|
2678
|
+
config_create: Parse configuration creation data
|
|
2679
|
+
project: Validated project from dependency
|
|
2680
|
+
user: Current user
|
|
2681
|
+
db: Database session
|
|
2682
|
+
|
|
2683
|
+
Returns:
|
|
2684
|
+
The created parse configuration
|
|
1219
2685
|
|
|
1220
2686
|
Parameters:
|
|
1221
2687
|
- project_id: typing.Optional[str].
|
|
1222
2688
|
|
|
1223
2689
|
- organization_id: typing.Optional[str].
|
|
1224
2690
|
|
|
1225
|
-
-
|
|
1226
|
-
|
|
1227
|
-
- page_token: typing.Optional[str].
|
|
1228
|
-
|
|
1229
|
-
- filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]].
|
|
1230
|
-
|
|
1231
|
-
- order_by: typing.Optional[str].
|
|
1232
|
-
|
|
1233
|
-
- agent_slug: str. The agent deployment's agent_slug to search within
|
|
1234
|
-
|
|
1235
|
-
- collection: typing.Optional[str]. The logical agent data collection to search within
|
|
1236
|
-
|
|
1237
|
-
- include_total: typing.Optional[bool]. Whether to include the total number of items in the response
|
|
1238
|
-
|
|
1239
|
-
- offset: typing.Optional[int].
|
|
2691
|
+
- request: ParseConfigurationCreate.
|
|
1240
2692
|
---
|
|
2693
|
+
from llama_cloud import (
|
|
2694
|
+
FailPageMode,
|
|
2695
|
+
LlamaParseParameters,
|
|
2696
|
+
LlamaParseParametersPriority,
|
|
2697
|
+
ParseConfigurationCreate,
|
|
2698
|
+
ParsingMode,
|
|
2699
|
+
)
|
|
1241
2700
|
from llama_cloud.client import AsyncLlamaCloud
|
|
1242
2701
|
|
|
1243
2702
|
client = AsyncLlamaCloud(
|
|
1244
2703
|
token="YOUR_TOKEN",
|
|
1245
2704
|
)
|
|
1246
|
-
await client.beta.
|
|
1247
|
-
|
|
2705
|
+
await client.beta.create_parse_configuration(
|
|
2706
|
+
request=ParseConfigurationCreate(
|
|
2707
|
+
name="string",
|
|
2708
|
+
version="string",
|
|
2709
|
+
parameters=LlamaParseParameters(
|
|
2710
|
+
priority=LlamaParseParametersPriority.LOW,
|
|
2711
|
+
parse_mode=ParsingMode.PARSE_PAGE_WITHOUT_LLM,
|
|
2712
|
+
replace_failed_page_mode=FailPageMode.RAW_TEXT,
|
|
2713
|
+
),
|
|
2714
|
+
),
|
|
1248
2715
|
)
|
|
1249
2716
|
"""
|
|
1250
|
-
_request: typing.Dict[str, typing.Any] = {"agent_slug": agent_slug}
|
|
1251
|
-
if page_size is not OMIT:
|
|
1252
|
-
_request["page_size"] = page_size
|
|
1253
|
-
if page_token is not OMIT:
|
|
1254
|
-
_request["page_token"] = page_token
|
|
1255
|
-
if filter is not OMIT:
|
|
1256
|
-
_request["filter"] = filter
|
|
1257
|
-
if order_by is not OMIT:
|
|
1258
|
-
_request["order_by"] = order_by
|
|
1259
|
-
if collection is not OMIT:
|
|
1260
|
-
_request["collection"] = collection
|
|
1261
|
-
if include_total is not OMIT:
|
|
1262
|
-
_request["include_total"] = include_total
|
|
1263
|
-
if offset is not OMIT:
|
|
1264
|
-
_request["offset"] = offset
|
|
1265
2717
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1266
2718
|
"POST",
|
|
1267
|
-
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/
|
|
2719
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations"),
|
|
1268
2720
|
params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
|
|
1269
|
-
json=jsonable_encoder(
|
|
2721
|
+
json=jsonable_encoder(request),
|
|
1270
2722
|
headers=self._client_wrapper.get_headers(),
|
|
1271
2723
|
timeout=60,
|
|
1272
2724
|
)
|
|
1273
2725
|
if 200 <= _response.status_code < 300:
|
|
1274
|
-
return pydantic.parse_obj_as(
|
|
2726
|
+
return pydantic.parse_obj_as(ParseConfiguration, _response.json()) # type: ignore
|
|
1275
2727
|
if _response.status_code == 422:
|
|
1276
2728
|
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
1277
2729
|
try:
|
|
@@ -1280,88 +2732,66 @@ class AsyncBetaClient:
|
|
|
1280
2732
|
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1281
2733
|
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1282
2734
|
|
|
1283
|
-
async def
|
|
2735
|
+
async def upsert_parse_configuration(
|
|
1284
2736
|
self,
|
|
1285
2737
|
*,
|
|
1286
2738
|
project_id: typing.Optional[str] = None,
|
|
1287
2739
|
organization_id: typing.Optional[str] = None,
|
|
1288
|
-
|
|
1289
|
-
|
|
1290
|
-
filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]] = OMIT,
|
|
1291
|
-
order_by: typing.Optional[str] = OMIT,
|
|
1292
|
-
agent_slug: str,
|
|
1293
|
-
collection: typing.Optional[str] = OMIT,
|
|
1294
|
-
group_by: typing.Optional[typing.List[str]] = OMIT,
|
|
1295
|
-
count: typing.Optional[bool] = OMIT,
|
|
1296
|
-
first: typing.Optional[bool] = OMIT,
|
|
1297
|
-
offset: typing.Optional[int] = OMIT,
|
|
1298
|
-
) -> PaginatedResponseAggregateGroup:
|
|
2740
|
+
request: ParseConfigurationCreate,
|
|
2741
|
+
) -> ParseConfiguration:
|
|
1299
2742
|
"""
|
|
1300
|
-
|
|
2743
|
+
Create or update a parse configuration by name.
|
|
2744
|
+
|
|
2745
|
+
Args:
|
|
2746
|
+
config_create: Parse configuration creation data
|
|
2747
|
+
project: Validated project from dependency
|
|
2748
|
+
user: Current user
|
|
2749
|
+
db: Database session
|
|
2750
|
+
|
|
2751
|
+
Returns:
|
|
2752
|
+
The created or updated parse configuration
|
|
1301
2753
|
|
|
1302
2754
|
Parameters:
|
|
1303
2755
|
- project_id: typing.Optional[str].
|
|
1304
2756
|
|
|
1305
2757
|
- organization_id: typing.Optional[str].
|
|
1306
2758
|
|
|
1307
|
-
-
|
|
1308
|
-
|
|
1309
|
-
- page_token: typing.Optional[str].
|
|
1310
|
-
|
|
1311
|
-
- filter: typing.Optional[typing.Dict[str, typing.Optional[FilterOperation]]].
|
|
1312
|
-
|
|
1313
|
-
- order_by: typing.Optional[str].
|
|
1314
|
-
|
|
1315
|
-
- agent_slug: str. The agent deployment's agent_slug to aggregate data for
|
|
1316
|
-
|
|
1317
|
-
- collection: typing.Optional[str]. The logical agent data collection to aggregate data for
|
|
1318
|
-
|
|
1319
|
-
- group_by: typing.Optional[typing.List[str]].
|
|
1320
|
-
|
|
1321
|
-
- count: typing.Optional[bool].
|
|
1322
|
-
|
|
1323
|
-
- first: typing.Optional[bool].
|
|
1324
|
-
|
|
1325
|
-
- offset: typing.Optional[int].
|
|
2759
|
+
- request: ParseConfigurationCreate.
|
|
1326
2760
|
---
|
|
2761
|
+
from llama_cloud import (
|
|
2762
|
+
FailPageMode,
|
|
2763
|
+
LlamaParseParameters,
|
|
2764
|
+
LlamaParseParametersPriority,
|
|
2765
|
+
ParseConfigurationCreate,
|
|
2766
|
+
ParsingMode,
|
|
2767
|
+
)
|
|
1327
2768
|
from llama_cloud.client import AsyncLlamaCloud
|
|
1328
2769
|
|
|
1329
2770
|
client = AsyncLlamaCloud(
|
|
1330
2771
|
token="YOUR_TOKEN",
|
|
1331
2772
|
)
|
|
1332
|
-
await client.beta.
|
|
1333
|
-
|
|
2773
|
+
await client.beta.upsert_parse_configuration(
|
|
2774
|
+
request=ParseConfigurationCreate(
|
|
2775
|
+
name="string",
|
|
2776
|
+
version="string",
|
|
2777
|
+
parameters=LlamaParseParameters(
|
|
2778
|
+
priority=LlamaParseParametersPriority.LOW,
|
|
2779
|
+
parse_mode=ParsingMode.PARSE_PAGE_WITHOUT_LLM,
|
|
2780
|
+
replace_failed_page_mode=FailPageMode.RAW_TEXT,
|
|
2781
|
+
),
|
|
2782
|
+
),
|
|
1334
2783
|
)
|
|
1335
2784
|
"""
|
|
1336
|
-
_request: typing.Dict[str, typing.Any] = {"agent_slug": agent_slug}
|
|
1337
|
-
if page_size is not OMIT:
|
|
1338
|
-
_request["page_size"] = page_size
|
|
1339
|
-
if page_token is not OMIT:
|
|
1340
|
-
_request["page_token"] = page_token
|
|
1341
|
-
if filter is not OMIT:
|
|
1342
|
-
_request["filter"] = filter
|
|
1343
|
-
if order_by is not OMIT:
|
|
1344
|
-
_request["order_by"] = order_by
|
|
1345
|
-
if collection is not OMIT:
|
|
1346
|
-
_request["collection"] = collection
|
|
1347
|
-
if group_by is not OMIT:
|
|
1348
|
-
_request["group_by"] = group_by
|
|
1349
|
-
if count is not OMIT:
|
|
1350
|
-
_request["count"] = count
|
|
1351
|
-
if first is not OMIT:
|
|
1352
|
-
_request["first"] = first
|
|
1353
|
-
if offset is not OMIT:
|
|
1354
|
-
_request["offset"] = offset
|
|
1355
2785
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1356
|
-
"
|
|
1357
|
-
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/
|
|
2786
|
+
"PUT",
|
|
2787
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations"),
|
|
1358
2788
|
params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
|
|
1359
|
-
json=jsonable_encoder(
|
|
2789
|
+
json=jsonable_encoder(request),
|
|
1360
2790
|
headers=self._client_wrapper.get_headers(),
|
|
1361
2791
|
timeout=60,
|
|
1362
2792
|
)
|
|
1363
2793
|
if 200 <= _response.status_code < 300:
|
|
1364
|
-
return pydantic.parse_obj_as(
|
|
2794
|
+
return pydantic.parse_obj_as(ParseConfiguration, _response.json()) # type: ignore
|
|
1365
2795
|
if _response.status_code == 422:
|
|
1366
2796
|
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
1367
2797
|
try:
|
|
@@ -1370,47 +2800,48 @@ class AsyncBetaClient:
|
|
|
1370
2800
|
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1371
2801
|
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1372
2802
|
|
|
1373
|
-
async def
|
|
1374
|
-
self,
|
|
1375
|
-
|
|
1376
|
-
source_type: typing_extensions.Literal["organization"],
|
|
1377
|
-
source_id: str,
|
|
1378
|
-
page: typing.Optional[int] = None,
|
|
1379
|
-
page_size: typing.Optional[int] = None,
|
|
1380
|
-
) -> PaginatedResponseQuotaConfiguration:
|
|
2803
|
+
async def get_parse_configuration(
|
|
2804
|
+
self, config_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
|
|
2805
|
+
) -> ParseConfiguration:
|
|
1381
2806
|
"""
|
|
1382
|
-
|
|
2807
|
+
Get a parse configuration by ID.
|
|
1383
2808
|
|
|
1384
|
-
|
|
1385
|
-
|
|
2809
|
+
Args:
|
|
2810
|
+
config_id: The ID of the parse configuration
|
|
2811
|
+
project: Validated project from dependency
|
|
2812
|
+
user: Current user
|
|
2813
|
+
db: Database session
|
|
1386
2814
|
|
|
1387
|
-
|
|
2815
|
+
Returns:
|
|
2816
|
+
The parse configuration
|
|
1388
2817
|
|
|
1389
|
-
|
|
2818
|
+
Parameters:
|
|
2819
|
+
- config_id: str.
|
|
1390
2820
|
|
|
1391
|
-
-
|
|
2821
|
+
- project_id: typing.Optional[str].
|
|
2822
|
+
|
|
2823
|
+
- organization_id: typing.Optional[str].
|
|
1392
2824
|
---
|
|
1393
2825
|
from llama_cloud.client import AsyncLlamaCloud
|
|
1394
2826
|
|
|
1395
2827
|
client = AsyncLlamaCloud(
|
|
1396
2828
|
token="YOUR_TOKEN",
|
|
1397
2829
|
)
|
|
1398
|
-
await client.beta.
|
|
1399
|
-
|
|
1400
|
-
source_id="string",
|
|
2830
|
+
await client.beta.get_parse_configuration(
|
|
2831
|
+
config_id="string",
|
|
1401
2832
|
)
|
|
1402
2833
|
"""
|
|
1403
2834
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1404
2835
|
"GET",
|
|
1405
|
-
urllib.parse.urljoin(
|
|
1406
|
-
|
|
1407
|
-
{"source_type": source_type, "source_id": source_id, "page": page, "page_size": page_size}
|
|
2836
|
+
urllib.parse.urljoin(
|
|
2837
|
+
f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/parse-configurations/{config_id}"
|
|
1408
2838
|
),
|
|
2839
|
+
params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
|
|
1409
2840
|
headers=self._client_wrapper.get_headers(),
|
|
1410
2841
|
timeout=60,
|
|
1411
2842
|
)
|
|
1412
2843
|
if 200 <= _response.status_code < 300:
|
|
1413
|
-
return pydantic.parse_obj_as(
|
|
2844
|
+
return pydantic.parse_obj_as(ParseConfiguration, _response.json()) # type: ignore
|
|
1414
2845
|
if _response.status_code == 422:
|
|
1415
2846
|
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
1416
2847
|
try:
|
|
@@ -1419,53 +2850,71 @@ class AsyncBetaClient:
|
|
|
1419
2850
|
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1420
2851
|
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1421
2852
|
|
|
1422
|
-
async def
|
|
2853
|
+
async def update_parse_configuration(
|
|
1423
2854
|
self,
|
|
2855
|
+
config_id: str,
|
|
1424
2856
|
*,
|
|
1425
2857
|
project_id: typing.Optional[str] = None,
|
|
1426
2858
|
organization_id: typing.Optional[str] = None,
|
|
1427
|
-
|
|
1428
|
-
) ->
|
|
2859
|
+
parameters: typing.Optional[LlamaParseParameters] = OMIT,
|
|
2860
|
+
) -> ParseConfiguration:
|
|
1429
2861
|
"""
|
|
1430
|
-
|
|
2862
|
+
Update a parse configuration.
|
|
1431
2863
|
|
|
1432
2864
|
Args:
|
|
1433
|
-
|
|
2865
|
+
config_id: The ID of the parse configuration to update
|
|
2866
|
+
config_update: Update data
|
|
1434
2867
|
project: Validated project from dependency
|
|
2868
|
+
user: Current user
|
|
1435
2869
|
db: Database session
|
|
1436
2870
|
|
|
1437
2871
|
Returns:
|
|
1438
|
-
The
|
|
2872
|
+
The updated parse configuration
|
|
1439
2873
|
|
|
1440
2874
|
Parameters:
|
|
2875
|
+
- config_id: str.
|
|
2876
|
+
|
|
1441
2877
|
- project_id: typing.Optional[str].
|
|
1442
2878
|
|
|
1443
2879
|
- organization_id: typing.Optional[str].
|
|
1444
2880
|
|
|
1445
|
-
-
|
|
2881
|
+
- parameters: typing.Optional[LlamaParseParameters].
|
|
1446
2882
|
---
|
|
1447
|
-
from llama_cloud import
|
|
2883
|
+
from llama_cloud import (
|
|
2884
|
+
FailPageMode,
|
|
2885
|
+
LlamaParseParameters,
|
|
2886
|
+
LlamaParseParametersPriority,
|
|
2887
|
+
ParsingMode,
|
|
2888
|
+
)
|
|
1448
2889
|
from llama_cloud.client import AsyncLlamaCloud
|
|
1449
2890
|
|
|
1450
2891
|
client = AsyncLlamaCloud(
|
|
1451
2892
|
token="YOUR_TOKEN",
|
|
1452
2893
|
)
|
|
1453
|
-
await client.beta.
|
|
1454
|
-
|
|
1455
|
-
|
|
2894
|
+
await client.beta.update_parse_configuration(
|
|
2895
|
+
config_id="string",
|
|
2896
|
+
parameters=LlamaParseParameters(
|
|
2897
|
+
priority=LlamaParseParametersPriority.LOW,
|
|
2898
|
+
parse_mode=ParsingMode.PARSE_PAGE_WITHOUT_LLM,
|
|
2899
|
+
replace_failed_page_mode=FailPageMode.RAW_TEXT,
|
|
1456
2900
|
),
|
|
1457
2901
|
)
|
|
1458
2902
|
"""
|
|
2903
|
+
_request: typing.Dict[str, typing.Any] = {}
|
|
2904
|
+
if parameters is not OMIT:
|
|
2905
|
+
_request["parameters"] = parameters
|
|
1459
2906
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1460
|
-
"
|
|
1461
|
-
urllib.parse.urljoin(
|
|
2907
|
+
"PUT",
|
|
2908
|
+
urllib.parse.urljoin(
|
|
2909
|
+
f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/parse-configurations/{config_id}"
|
|
2910
|
+
),
|
|
1462
2911
|
params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
|
|
1463
|
-
json=jsonable_encoder(
|
|
2912
|
+
json=jsonable_encoder(_request),
|
|
1464
2913
|
headers=self._client_wrapper.get_headers(),
|
|
1465
2914
|
timeout=60,
|
|
1466
2915
|
)
|
|
1467
2916
|
if 200 <= _response.status_code < 300:
|
|
1468
|
-
return pydantic.parse_obj_as(
|
|
2917
|
+
return pydantic.parse_obj_as(ParseConfiguration, _response.json()) # type: ignore
|
|
1469
2918
|
if _response.status_code == 422:
|
|
1470
2919
|
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
1471
2920
|
try:
|
|
@@ -1474,53 +2923,45 @@ class AsyncBetaClient:
|
|
|
1474
2923
|
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1475
2924
|
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1476
2925
|
|
|
1477
|
-
async def
|
|
1478
|
-
self,
|
|
1479
|
-
|
|
1480
|
-
project_id: typing.Optional[str] = None,
|
|
1481
|
-
organization_id: typing.Optional[str] = None,
|
|
1482
|
-
request: FileCreate,
|
|
1483
|
-
) -> File:
|
|
2926
|
+
async def delete_parse_configuration(
|
|
2927
|
+
self, config_id: str, *, project_id: typing.Optional[str] = None, organization_id: typing.Optional[str] = None
|
|
2928
|
+
) -> None:
|
|
1484
2929
|
"""
|
|
1485
|
-
|
|
2930
|
+
Delete a parse configuration.
|
|
1486
2931
|
|
|
1487
2932
|
Args:
|
|
1488
|
-
|
|
2933
|
+
config_id: The ID of the parse configuration to delete
|
|
1489
2934
|
project: Validated project from dependency
|
|
2935
|
+
user: Current user
|
|
1490
2936
|
db: Database session
|
|
1491
2937
|
|
|
1492
|
-
Returns:
|
|
1493
|
-
The upserted file
|
|
1494
|
-
|
|
1495
2938
|
Parameters:
|
|
2939
|
+
- config_id: str.
|
|
2940
|
+
|
|
1496
2941
|
- project_id: typing.Optional[str].
|
|
1497
2942
|
|
|
1498
2943
|
- organization_id: typing.Optional[str].
|
|
1499
|
-
|
|
1500
|
-
- request: FileCreate.
|
|
1501
2944
|
---
|
|
1502
|
-
from llama_cloud import FileCreate
|
|
1503
2945
|
from llama_cloud.client import AsyncLlamaCloud
|
|
1504
2946
|
|
|
1505
2947
|
client = AsyncLlamaCloud(
|
|
1506
2948
|
token="YOUR_TOKEN",
|
|
1507
2949
|
)
|
|
1508
|
-
await client.beta.
|
|
1509
|
-
|
|
1510
|
-
name="string",
|
|
1511
|
-
),
|
|
2950
|
+
await client.beta.delete_parse_configuration(
|
|
2951
|
+
config_id="string",
|
|
1512
2952
|
)
|
|
1513
2953
|
"""
|
|
1514
2954
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1515
|
-
"
|
|
1516
|
-
urllib.parse.urljoin(
|
|
2955
|
+
"DELETE",
|
|
2956
|
+
urllib.parse.urljoin(
|
|
2957
|
+
f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/parse-configurations/{config_id}"
|
|
2958
|
+
),
|
|
1517
2959
|
params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
|
|
1518
|
-
json=jsonable_encoder(request),
|
|
1519
2960
|
headers=self._client_wrapper.get_headers(),
|
|
1520
2961
|
timeout=60,
|
|
1521
2962
|
)
|
|
1522
2963
|
if 200 <= _response.status_code < 300:
|
|
1523
|
-
return
|
|
2964
|
+
return
|
|
1524
2965
|
if _response.status_code == 422:
|
|
1525
2966
|
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
1526
2967
|
try:
|
|
@@ -1529,26 +2970,27 @@ class AsyncBetaClient:
|
|
|
1529
2970
|
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1530
2971
|
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1531
2972
|
|
|
1532
|
-
async def
|
|
2973
|
+
async def query_parse_configurations(
|
|
1533
2974
|
self,
|
|
1534
2975
|
*,
|
|
1535
2976
|
project_id: typing.Optional[str] = None,
|
|
1536
2977
|
organization_id: typing.Optional[str] = None,
|
|
1537
2978
|
page_size: typing.Optional[int] = OMIT,
|
|
1538
2979
|
page_token: typing.Optional[str] = OMIT,
|
|
1539
|
-
filter: typing.Optional[
|
|
2980
|
+
filter: typing.Optional[ParseConfigurationFilter] = OMIT,
|
|
1540
2981
|
order_by: typing.Optional[str] = OMIT,
|
|
1541
|
-
) ->
|
|
2982
|
+
) -> ParseConfigurationQueryResponse:
|
|
1542
2983
|
"""
|
|
1543
|
-
Query
|
|
2984
|
+
Query parse configurations with filtering and pagination.
|
|
1544
2985
|
|
|
1545
2986
|
Args:
|
|
1546
|
-
|
|
2987
|
+
query_request: Query request with filters and pagination
|
|
1547
2988
|
project: Validated project from dependency
|
|
2989
|
+
user: Current user
|
|
1548
2990
|
db: Database session
|
|
1549
2991
|
|
|
1550
2992
|
Returns:
|
|
1551
|
-
Paginated response with
|
|
2993
|
+
Paginated response with parse configurations
|
|
1552
2994
|
|
|
1553
2995
|
Parameters:
|
|
1554
2996
|
- project_id: typing.Optional[str].
|
|
@@ -1559,18 +3001,18 @@ class AsyncBetaClient:
|
|
|
1559
3001
|
|
|
1560
3002
|
- page_token: typing.Optional[str].
|
|
1561
3003
|
|
|
1562
|
-
- filter: typing.Optional[
|
|
3004
|
+
- filter: typing.Optional[ParseConfigurationFilter].
|
|
1563
3005
|
|
|
1564
3006
|
- order_by: typing.Optional[str].
|
|
1565
3007
|
---
|
|
1566
|
-
from llama_cloud import
|
|
3008
|
+
from llama_cloud import ParseConfigurationFilter
|
|
1567
3009
|
from llama_cloud.client import AsyncLlamaCloud
|
|
1568
3010
|
|
|
1569
3011
|
client = AsyncLlamaCloud(
|
|
1570
3012
|
token="YOUR_TOKEN",
|
|
1571
3013
|
)
|
|
1572
|
-
await client.beta.
|
|
1573
|
-
filter=
|
|
3014
|
+
await client.beta.query_parse_configurations(
|
|
3015
|
+
filter=ParseConfigurationFilter(),
|
|
1574
3016
|
)
|
|
1575
3017
|
"""
|
|
1576
3018
|
_request: typing.Dict[str, typing.Any] = {}
|
|
@@ -1584,14 +3026,14 @@ class AsyncBetaClient:
|
|
|
1584
3026
|
_request["order_by"] = order_by
|
|
1585
3027
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1586
3028
|
"POST",
|
|
1587
|
-
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/
|
|
3029
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations/query"),
|
|
1588
3030
|
params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
|
|
1589
3031
|
json=jsonable_encoder(_request),
|
|
1590
3032
|
headers=self._client_wrapper.get_headers(),
|
|
1591
3033
|
timeout=60,
|
|
1592
3034
|
)
|
|
1593
3035
|
if 200 <= _response.status_code < 300:
|
|
1594
|
-
return pydantic.parse_obj_as(
|
|
3036
|
+
return pydantic.parse_obj_as(ParseConfigurationQueryResponse, _response.json()) # type: ignore
|
|
1595
3037
|
if _response.status_code == 422:
|
|
1596
3038
|
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
1597
3039
|
try:
|
|
@@ -1600,22 +3042,27 @@ class AsyncBetaClient:
|
|
|
1600
3042
|
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1601
3043
|
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1602
3044
|
|
|
1603
|
-
async def
|
|
1604
|
-
self,
|
|
1605
|
-
|
|
3045
|
+
async def get_latest_parse_configuration(
|
|
3046
|
+
self,
|
|
3047
|
+
*,
|
|
3048
|
+
creator: typing.Optional[str] = None,
|
|
3049
|
+
project_id: typing.Optional[str] = None,
|
|
3050
|
+
organization_id: typing.Optional[str] = None,
|
|
3051
|
+
) -> typing.Optional[ParseConfiguration]:
|
|
1606
3052
|
"""
|
|
1607
|
-
|
|
3053
|
+
Get the latest parse configuration for the current project.
|
|
1608
3054
|
|
|
1609
3055
|
Args:
|
|
1610
|
-
file_id: The ID of the file to delete
|
|
1611
3056
|
project: Validated project from dependency
|
|
3057
|
+
user: Current user
|
|
1612
3058
|
db: Database session
|
|
3059
|
+
creator: Optional creator filter
|
|
1613
3060
|
|
|
1614
3061
|
Returns:
|
|
1615
|
-
|
|
3062
|
+
The latest parse configuration or None if not found
|
|
1616
3063
|
|
|
1617
3064
|
Parameters:
|
|
1618
|
-
-
|
|
3065
|
+
- creator: typing.Optional[str].
|
|
1619
3066
|
|
|
1620
3067
|
- project_id: typing.Optional[str].
|
|
1621
3068
|
|
|
@@ -1626,19 +3073,19 @@ class AsyncBetaClient:
|
|
|
1626
3073
|
client = AsyncLlamaCloud(
|
|
1627
3074
|
token="YOUR_TOKEN",
|
|
1628
3075
|
)
|
|
1629
|
-
await client.beta.
|
|
1630
|
-
file_id="string",
|
|
1631
|
-
)
|
|
3076
|
+
await client.beta.get_latest_parse_configuration()
|
|
1632
3077
|
"""
|
|
1633
3078
|
_response = await self._client_wrapper.httpx_client.request(
|
|
1634
|
-
"
|
|
1635
|
-
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/",
|
|
1636
|
-
params=remove_none_from_dict(
|
|
3079
|
+
"GET",
|
|
3080
|
+
urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/parse-configurations/latest"),
|
|
3081
|
+
params=remove_none_from_dict(
|
|
3082
|
+
{"creator": creator, "project_id": project_id, "organization_id": organization_id}
|
|
3083
|
+
),
|
|
1637
3084
|
headers=self._client_wrapper.get_headers(),
|
|
1638
3085
|
timeout=60,
|
|
1639
3086
|
)
|
|
1640
3087
|
if 200 <= _response.status_code < 300:
|
|
1641
|
-
return
|
|
3088
|
+
return pydantic.parse_obj_as(typing.Optional[ParseConfiguration], _response.json()) # type: ignore
|
|
1642
3089
|
if _response.status_code == 422:
|
|
1643
3090
|
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
|
|
1644
3091
|
try:
|