llama-cloud 0.1.36__py3-none-any.whl → 0.1.38__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release.
- llama_cloud/__init__.py +20 -4
- llama_cloud/client.py +3 -0
- llama_cloud/resources/__init__.py +3 -3
- llama_cloud/resources/admin/client.py +57 -0
- llama_cloud/resources/alpha/__init__.py +2 -0
- llama_cloud/resources/alpha/client.py +118 -0
- llama_cloud/resources/beta/client.py +576 -20
- llama_cloud/resources/chat_apps/client.py +32 -8
- llama_cloud/resources/classifier/client.py +139 -11
- llama_cloud/resources/data_sinks/client.py +32 -8
- llama_cloud/resources/data_sources/client.py +32 -8
- llama_cloud/resources/data_sources/types/data_source_update_component.py +2 -0
- llama_cloud/resources/embedding_model_configs/client.py +48 -12
- llama_cloud/resources/files/__init__.py +2 -2
- llama_cloud/resources/files/client.py +189 -113
- llama_cloud/resources/files/types/__init__.py +1 -3
- llama_cloud/resources/jobs/client.py +12 -6
- llama_cloud/resources/llama_extract/client.py +138 -32
- llama_cloud/resources/organizations/client.py +18 -4
- llama_cloud/resources/parsing/client.py +16 -4
- llama_cloud/resources/pipelines/client.py +32 -8
- llama_cloud/resources/projects/client.py +78 -18
- llama_cloud/resources/reports/client.py +126 -30
- llama_cloud/resources/retrievers/client.py +48 -12
- llama_cloud/types/__init__.py +20 -2
- llama_cloud/types/agent_deployment_summary.py +1 -0
- llama_cloud/types/classify_job.py +2 -0
- llama_cloud/types/cloud_jira_data_source_v_2.py +52 -0
- llama_cloud/types/cloud_jira_data_source_v_2_api_version.py +21 -0
- llama_cloud/types/configurable_data_source_names.py +4 -0
- llama_cloud/types/data_source_component.py +2 -0
- llama_cloud/types/data_source_create_component.py +2 -0
- llama_cloud/types/data_source_reader_version_metadata_reader_version.py +9 -1
- llama_cloud/types/file_create.py +41 -0
- llama_cloud/types/{classify_job_with_status.py → file_filter.py} +8 -15
- llama_cloud/types/file_query_response.py +38 -0
- llama_cloud/types/llama_extract_mode_availability.py +37 -0
- llama_cloud/types/llama_extract_mode_availability_status.py +17 -0
- llama_cloud/types/paginated_response_classify_job.py +34 -0
- llama_cloud/types/pipeline_data_source_component.py +2 -0
- llama_cloud/types/usage_response_active_alerts_item.py +4 -0
- {llama_cloud-0.1.36.dist-info → llama_cloud-0.1.38.dist-info}/METADATA +2 -1
- {llama_cloud-0.1.36.dist-info → llama_cloud-0.1.38.dist-info}/RECORD +47 -38
- {llama_cloud-0.1.36.dist-info → llama_cloud-0.1.38.dist-info}/WHEEL +1 -1
- /llama_cloud/{resources/files/types → types}/file_create_permission_info_value.py +0 -0
- /llama_cloud/{resources/files/types → types}/file_create_resource_info_value.py +0 -0
- {llama_cloud-0.1.36.dist-info → llama_cloud-0.1.38.dist-info}/LICENSE +0 -0
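
Almost all of the churn comes from a regenerated llama_cloud/resources/beta/client.py, shown below: BetaClient and AsyncBetaClient gain beta file endpoints (create_file, upsert_file, query_files, delete_file) against api/v1/beta/files, and every touched method now also sends project_id as a Project-Id request header. Note that the regenerated signatures declare project_id twice throughout, which is a duplicate-argument SyntaxError in Python and may be why the release is flagged as potentially problematic. A minimal usage sketch of the new file surface, assembled from the docstring examples in the diff below (the token, file name, and file ID are placeholders, not values from the diff):

# Hypothetical usage sketch for the beta file endpoints added in 0.1.38,
# assembled from the docstring examples in the diff below.
from llama_cloud import FileCreate, FileFilter
from llama_cloud.client import LlamaCloud

client = LlamaCloud(
    token="YOUR_TOKEN",  # placeholder API token
)

# POST api/v1/beta/files: create a file record in the project
created = client.beta.create_file(request=FileCreate(name="report.pdf"))

# PUT api/v1/beta/files: create, or update if it already exists
upserted = client.beta.upsert_file(request=FileCreate(name="report.pdf"))

# POST api/v1/beta/files/query: filtered, paginated listing
page = client.beta.query_files(filter=FileFilter(), page_size=50)

# DELETE api/v1/beta/files/{file_id}: returns None (204 No Content)
client.beta.delete_file(file_id="file_123")  # placeholder file ID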
llama_cloud/resources/beta/client.py:

@@ -15,6 +15,10 @@ from ...types.agent_data import AgentData
 from ...types.batch import Batch
 from ...types.batch_paginated_list import BatchPaginatedList
 from ...types.batch_public_output import BatchPublicOutput
+from ...types.file import File
+from ...types.file_create import FileCreate
+from ...types.file_filter import FileFilter
+from ...types.file_query_response import FileQueryResponse
 from ...types.filter_operation import FilterOperation
 from ...types.http_validation_error import HttpValidationError
 from ...types.llama_parse_parameters import LlamaParseParameters
@@ -45,6 +49,7 @@ class BetaClient:
         offset: typing.Optional[int] = None,
         project_id: typing.Optional[str] = None,
         organization_id: typing.Optional[str] = None,
+        project_id: typing.Optional[str] = None,
     ) -> BatchPaginatedList:
         """
         Parameters:
@@ -55,6 +60,8 @@ class BetaClient:
             - project_id: typing.Optional[str].

             - organization_id: typing.Optional[str].
+
+            - project_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

@@ -69,7 +76,7 @@ class BetaClient:
             params=remove_none_from_dict(
                 {"limit": limit, "offset": offset, "project_id": project_id, "organization_id": organization_id}
             ),
-            headers=self._client_wrapper.get_headers(),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
@@ -96,6 +103,7 @@ class BetaClient:
         batch_create_project_id: str,
         external_id: str,
         completion_window: typing.Optional[int] = OMIT,
+        project_id: typing.Optional[str] = None,
     ) -> Batch:
         """
         Parameters:
@@ -120,6 +128,8 @@ class BetaClient:
             - external_id: str. A developer-provided ID for the batch. This ID will be returned in the response.

             - completion_window: typing.Optional[int]. The time frame within which the batch should be processed. Currently only 24h is supported.
+
+            - project_id: typing.Optional[str].
         ---
         from llama_cloud import (
             FailPageMode,
@@ -165,7 +175,7 @@ class BetaClient:
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/batches"),
             params=remove_none_from_dict({"organization_id": organization_id, "project_id": project_id}),
             json=jsonable_encoder(_request),
-            headers=self._client_wrapper.get_headers(),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
@@ -212,7 +222,12 @@ class BetaClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)

     def get_agent_data(
-        self,
+        self,
+        item_id: str,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        project_id: typing.Optional[str] = None,
     ) -> AgentData:
         """
         Get agent data by ID.
@@ -223,6 +238,8 @@ class BetaClient:
             - project_id: typing.Optional[str].

             - organization_id: typing.Optional[str].
+
+            - project_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

@@ -237,7 +254,7 @@ class BetaClient:
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
             params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
-            headers=self._client_wrapper.get_headers(),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
@@ -257,6 +274,7 @@ class BetaClient:
         project_id: typing.Optional[str] = None,
         organization_id: typing.Optional[str] = None,
         data: typing.Dict[str, typing.Any],
+        project_id: typing.Optional[str] = None,
     ) -> AgentData:
         """
         Update agent data by ID (overwrites).
@@ -269,6 +287,8 @@ class BetaClient:
             - organization_id: typing.Optional[str].

             - data: typing.Dict[str, typing.Any].
+
+            - project_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

@@ -285,7 +305,7 @@ class BetaClient:
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
             params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder({"data": data}),
-            headers=self._client_wrapper.get_headers(),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
@@ -299,7 +319,12 @@ class BetaClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)

     def delete_agent_data(
-        self,
+        self,
+        item_id: str,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        project_id: typing.Optional[str] = None,
     ) -> typing.Dict[str, str]:
         """
         Delete agent data by ID.
@@ -310,6 +335,8 @@ class BetaClient:
             - project_id: typing.Optional[str].

             - organization_id: typing.Optional[str].
+
+            - project_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

@@ -324,7 +351,7 @@ class BetaClient:
             "DELETE",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
             params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
-            headers=self._client_wrapper.get_headers(),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
@@ -345,6 +372,7 @@ class BetaClient:
         agent_slug: str,
         collection: typing.Optional[str] = OMIT,
         data: typing.Dict[str, typing.Any],
+        project_id: typing.Optional[str] = None,
     ) -> AgentData:
         """
         Create new agent data.
@@ -359,6 +387,8 @@ class BetaClient:
             - collection: typing.Optional[str].

             - data: typing.Dict[str, typing.Any].
+
+            - project_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

@@ -378,7 +408,7 @@ class BetaClient:
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data"),
             params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(_request),
-            headers=self._client_wrapper.get_headers(),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
@@ -404,6 +434,7 @@ class BetaClient:
         collection: typing.Optional[str] = OMIT,
         include_total: typing.Optional[bool] = OMIT,
         offset: typing.Optional[int] = OMIT,
+        project_id: typing.Optional[str] = None,
     ) -> PaginatedResponseAgentData:
         """
         Search agent data with filtering, sorting, and pagination.
@@ -428,6 +459,8 @@ class BetaClient:
             - include_total: typing.Optional[bool]. Whether to include the total number of items in the response

             - offset: typing.Optional[int].
+
+            - project_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

@@ -458,7 +491,7 @@ class BetaClient:
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:search"),
             params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(_request),
-            headers=self._client_wrapper.get_headers(),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
@@ -486,6 +519,7 @@ class BetaClient:
         count: typing.Optional[bool] = OMIT,
         first: typing.Optional[bool] = OMIT,
         offset: typing.Optional[int] = OMIT,
+        project_id: typing.Optional[str] = None,
     ) -> PaginatedResponseAggregateGroup:
         """
         Aggregate agent data with grouping and optional counting/first item retrieval.
@@ -514,6 +548,8 @@ class BetaClient:
             - first: typing.Optional[bool].

             - offset: typing.Optional[int].
+
+            - project_id: typing.Optional[str].
         ---
         from llama_cloud.client import LlamaCloud

@@ -548,7 +584,7 @@ class BetaClient:
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:aggregate"),
             params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(_request),
-            headers=self._client_wrapper.get_headers(),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
@@ -610,6 +646,250 @@ class BetaClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

+    def create_file(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        request: FileCreate,
+        project_id: typing.Optional[str] = None,
+    ) -> File:
+        """
+        Create a new file in the project.
+
+        Args:
+            file_create: File creation data
+            project: Validated project from dependency
+            db: Database session
+
+        Returns:
+            The created file
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
+
+            - request: FileCreate.
+
+            - project_id: typing.Optional[str].
+        ---
+        from llama_cloud import FileCreate
+        from llama_cloud.client import LlamaCloud
+
+        client = LlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        client.beta.create_file(
+            request=FileCreate(
+                name="string",
+            ),
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+            json=jsonable_encoder(request),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(File, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def upsert_file(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        request: FileCreate,
+        project_id: typing.Optional[str] = None,
+    ) -> File:
+        """
+        Upsert a file (create or update if exists) in the project.
+
+        Args:
+            file_create: File creation/update data
+            project: Validated project from dependency
+            db: Database session
+
+        Returns:
+            The upserted file
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
+
+            - request: FileCreate.
+
+            - project_id: typing.Optional[str].
+        ---
+        from llama_cloud import FileCreate
+        from llama_cloud.client import LlamaCloud
+
+        client = LlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        client.beta.upsert_file(
+            request=FileCreate(
+                name="string",
+            ),
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+            json=jsonable_encoder(request),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(File, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def query_files(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        page_size: typing.Optional[int] = OMIT,
+        page_token: typing.Optional[str] = OMIT,
+        filter: typing.Optional[FileFilter] = OMIT,
+        order_by: typing.Optional[str] = OMIT,
+        project_id: typing.Optional[str] = None,
+    ) -> FileQueryResponse:
+        """
+        Query files with flexible filtering and pagination.
+
+        Args:
+            request: The query request with filters and pagination
+            project: Validated project from dependency
+            db: Database session
+
+        Returns:
+            Paginated response with files
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
+
+            - page_size: typing.Optional[int].
+
+            - page_token: typing.Optional[str].
+
+            - filter: typing.Optional[FileFilter].
+
+            - order_by: typing.Optional[str].
+
+            - project_id: typing.Optional[str].
+        ---
+        from llama_cloud import FileFilter
+        from llama_cloud.client import LlamaCloud
+
+        client = LlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        client.beta.query_files(
+            filter=FileFilter(),
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {}
+        if page_size is not OMIT:
+            _request["page_size"] = page_size
+        if page_token is not OMIT:
+            _request["page_token"] = page_token
+        if filter is not OMIT:
+            _request["filter"] = filter
+        if order_by is not OMIT:
+            _request["order_by"] = order_by
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files/query"),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+            json=jsonable_encoder(_request),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(FileQueryResponse, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def delete_file(
+        self,
+        file_id: str,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        project_id: typing.Optional[str] = None,
+    ) -> None:
+        """
+        Delete a single file from the project.
+
+        Args:
+            file_id: The ID of the file to delete
+            project: Validated project from dependency
+            db: Database session
+
+        Returns:
+            None (204 No Content on success)
+
+        Parameters:
+            - file_id: str.
+
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
+
+            - project_id: typing.Optional[str].
+        ---
+        from llama_cloud.client import LlamaCloud
+
+        client = LlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        client.beta.delete_file(
+            file_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "DELETE",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/files/{file_id}"),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+

 class AsyncBetaClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -622,6 +902,7 @@ class AsyncBetaClient:
         offset: typing.Optional[int] = None,
         project_id: typing.Optional[str] = None,
         organization_id: typing.Optional[str] = None,
+        project_id: typing.Optional[str] = None,
     ) -> BatchPaginatedList:
         """
         Parameters:
@@ -632,6 +913,8 @@ class AsyncBetaClient:
             - project_id: typing.Optional[str].

             - organization_id: typing.Optional[str].
+
+            - project_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

@@ -646,7 +929,7 @@ class AsyncBetaClient:
             params=remove_none_from_dict(
                 {"limit": limit, "offset": offset, "project_id": project_id, "organization_id": organization_id}
             ),
-            headers=self._client_wrapper.get_headers(),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
@@ -673,6 +956,7 @@ class AsyncBetaClient:
         batch_create_project_id: str,
         external_id: str,
         completion_window: typing.Optional[int] = OMIT,
+        project_id: typing.Optional[str] = None,
     ) -> Batch:
         """
         Parameters:
@@ -697,6 +981,8 @@ class AsyncBetaClient:
             - external_id: str. A developer-provided ID for the batch. This ID will be returned in the response.

             - completion_window: typing.Optional[int]. The time frame within which the batch should be processed. Currently only 24h is supported.
+
+            - project_id: typing.Optional[str].
         ---
         from llama_cloud import (
             FailPageMode,
@@ -742,7 +1028,7 @@ class AsyncBetaClient:
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/batches"),
             params=remove_none_from_dict({"organization_id": organization_id, "project_id": project_id}),
             json=jsonable_encoder(_request),
-            headers=self._client_wrapper.get_headers(),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
@@ -789,7 +1075,12 @@ class AsyncBetaClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)

     async def get_agent_data(
-        self,
+        self,
+        item_id: str,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        project_id: typing.Optional[str] = None,
     ) -> AgentData:
         """
         Get agent data by ID.
@@ -800,6 +1091,8 @@ class AsyncBetaClient:
             - project_id: typing.Optional[str].

             - organization_id: typing.Optional[str].
+
+            - project_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

@@ -814,7 +1107,7 @@ class AsyncBetaClient:
             "GET",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
             params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
-            headers=self._client_wrapper.get_headers(),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
@@ -834,6 +1127,7 @@ class AsyncBetaClient:
         project_id: typing.Optional[str] = None,
         organization_id: typing.Optional[str] = None,
         data: typing.Dict[str, typing.Any],
+        project_id: typing.Optional[str] = None,
     ) -> AgentData:
         """
         Update agent data by ID (overwrites).
@@ -846,6 +1140,8 @@ class AsyncBetaClient:
             - organization_id: typing.Optional[str].

             - data: typing.Dict[str, typing.Any].
+
+            - project_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

@@ -862,7 +1158,7 @@ class AsyncBetaClient:
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
             params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder({"data": data}),
-            headers=self._client_wrapper.get_headers(),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
@@ -876,7 +1172,12 @@ class AsyncBetaClient:
         raise ApiError(status_code=_response.status_code, body=_response_json)

     async def delete_agent_data(
-        self,
+        self,
+        item_id: str,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        project_id: typing.Optional[str] = None,
     ) -> typing.Dict[str, str]:
         """
         Delete agent data by ID.
@@ -887,6 +1188,8 @@ class AsyncBetaClient:
             - project_id: typing.Optional[str].

             - organization_id: typing.Optional[str].
+
+            - project_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

@@ -901,7 +1204,7 @@ class AsyncBetaClient:
             "DELETE",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/agent-data/{item_id}"),
             params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
-            headers=self._client_wrapper.get_headers(),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
@@ -922,6 +1225,7 @@ class AsyncBetaClient:
         agent_slug: str,
         collection: typing.Optional[str] = OMIT,
         data: typing.Dict[str, typing.Any],
+        project_id: typing.Optional[str] = None,
     ) -> AgentData:
         """
         Create new agent data.
@@ -936,6 +1240,8 @@ class AsyncBetaClient:
             - collection: typing.Optional[str].

             - data: typing.Dict[str, typing.Any].
+
+            - project_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

@@ -955,7 +1261,7 @@ class AsyncBetaClient:
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data"),
             params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(_request),
-            headers=self._client_wrapper.get_headers(),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
@@ -981,6 +1287,7 @@ class AsyncBetaClient:
         collection: typing.Optional[str] = OMIT,
         include_total: typing.Optional[bool] = OMIT,
         offset: typing.Optional[int] = OMIT,
+        project_id: typing.Optional[str] = None,
     ) -> PaginatedResponseAgentData:
         """
         Search agent data with filtering, sorting, and pagination.
@@ -1005,6 +1312,8 @@ class AsyncBetaClient:
             - include_total: typing.Optional[bool]. Whether to include the total number of items in the response

             - offset: typing.Optional[int].
+
+            - project_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

@@ -1035,7 +1344,7 @@ class AsyncBetaClient:
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:search"),
             params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(_request),
-            headers=self._client_wrapper.get_headers(),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
@@ -1063,6 +1372,7 @@ class AsyncBetaClient:
         count: typing.Optional[bool] = OMIT,
         first: typing.Optional[bool] = OMIT,
         offset: typing.Optional[int] = OMIT,
+        project_id: typing.Optional[str] = None,
     ) -> PaginatedResponseAggregateGroup:
         """
         Aggregate agent data with grouping and optional counting/first item retrieval.
@@ -1091,6 +1401,8 @@ class AsyncBetaClient:
             - first: typing.Optional[bool].

             - offset: typing.Optional[int].
+
+            - project_id: typing.Optional[str].
         ---
         from llama_cloud.client import AsyncLlamaCloud

@@ -1125,7 +1437,7 @@ class AsyncBetaClient:
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/agent-data/:aggregate"),
             params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
             json=jsonable_encoder(_request),
-            headers=self._client_wrapper.get_headers(),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
@@ -1186,3 +1498,247 @@ class AsyncBetaClient:
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def create_file(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        request: FileCreate,
+        project_id: typing.Optional[str] = None,
+    ) -> File:
+        """
+        Create a new file in the project.
+
+        Args:
+            file_create: File creation data
+            project: Validated project from dependency
+            db: Database session
+
+        Returns:
+            The created file
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
+
+            - request: FileCreate.
+
+            - project_id: typing.Optional[str].
+        ---
+        from llama_cloud import FileCreate
+        from llama_cloud.client import AsyncLlamaCloud
+
+        client = AsyncLlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        await client.beta.create_file(
+            request=FileCreate(
+                name="string",
+            ),
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+            json=jsonable_encoder(request),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(File, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def upsert_file(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        request: FileCreate,
+        project_id: typing.Optional[str] = None,
+    ) -> File:
+        """
+        Upsert a file (create or update if exists) in the project.
+
+        Args:
+            file_create: File creation/update data
+            project: Validated project from dependency
+            db: Database session
+
+        Returns:
+            The upserted file
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
+
+            - request: FileCreate.
+
+            - project_id: typing.Optional[str].
+        ---
+        from llama_cloud import FileCreate
+        from llama_cloud.client import AsyncLlamaCloud
+
+        client = AsyncLlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        await client.beta.upsert_file(
+            request=FileCreate(
+                name="string",
+            ),
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files"),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+            json=jsonable_encoder(request),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(File, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def query_files(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        page_size: typing.Optional[int] = OMIT,
+        page_token: typing.Optional[str] = OMIT,
+        filter: typing.Optional[FileFilter] = OMIT,
+        order_by: typing.Optional[str] = OMIT,
+        project_id: typing.Optional[str] = None,
+    ) -> FileQueryResponse:
+        """
+        Query files with flexible filtering and pagination.
+
+        Args:
+            request: The query request with filters and pagination
+            project: Validated project from dependency
+            db: Database session
+
+        Returns:
+            Paginated response with files
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
+
+            - page_size: typing.Optional[int].
+
+            - page_token: typing.Optional[str].
+
+            - filter: typing.Optional[FileFilter].
+
+            - order_by: typing.Optional[str].
+
+            - project_id: typing.Optional[str].
+        ---
+        from llama_cloud import FileFilter
+        from llama_cloud.client import AsyncLlamaCloud
+
+        client = AsyncLlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        await client.beta.query_files(
+            filter=FileFilter(),
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {}
+        if page_size is not OMIT:
+            _request["page_size"] = page_size
+        if page_token is not OMIT:
+            _request["page_token"] = page_token
+        if filter is not OMIT:
+            _request["filter"] = filter
+        if order_by is not OMIT:
+            _request["order_by"] = order_by
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/beta/files/query"),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+            json=jsonable_encoder(_request),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(FileQueryResponse, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def delete_file(
+        self,
+        file_id: str,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        project_id: typing.Optional[str] = None,
+    ) -> None:
+        """
+        Delete a single file from the project.
+
+        Args:
+            file_id: The ID of the file to delete
+            project: Validated project from dependency
+            db: Database session
+
+        Returns:
+            None (204 No Content on success)
+
+        Parameters:
+            - file_id: str.
+
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
+
+            - project_id: typing.Optional[str].
+        ---
+        from llama_cloud.client import AsyncLlamaCloud
+
+        client = AsyncLlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        await client.beta.delete_file(
+            file_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "DELETE",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/beta/files/{file_id}"),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+            headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
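
The one change repeated across every hunk above is the header construction: headers=self._client_wrapper.get_headers() becomes headers=remove_none_from_dict({**self._client_wrapper.get_headers(), "Project-Id": project_id}), so project_id now travels as a Project-Id header in addition to a query parameter. Because project_id defaults to None, wrapping the merged dict in remove_none_from_dict keeps the header out of the request unless a project is actually set. A standalone sketch of that behavior follows; the helper body here is an assumption inferred from its name and call sites, not the SDK's actual source:

# Sketch of the Project-Id header behavior; this remove_none_from_dict is a
# stand-in assumed to behave like the SDK helper used throughout the diff.
from typing import Any, Dict, Optional


def remove_none_from_dict(original: Dict[str, Any]) -> Dict[str, Any]:
    # Drop entries whose value is None so they are never serialized.
    return {key: value for key, value in original.items() if value is not None}


def build_headers(base: Dict[str, str], project_id: Optional[str]) -> Dict[str, str]:
    # Mirrors the new call sites: the header is only sent when project_id is set.
    return remove_none_from_dict({**base, "Project-Id": project_id})


base = {"Authorization": "Bearer YOUR_TOKEN"}  # placeholder token
print(build_headers(base, None))        # {'Authorization': 'Bearer YOUR_TOKEN'}
print(build_headers(base, "proj_123"))  # adds 'Project-Id': 'proj_123'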