llama-cloud 0.1.37__py3-none-any.whl → 0.1.39__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llama_cloud/__init__.py +8 -2
- llama_cloud/client.py +3 -0
- llama_cloud/resources/__init__.py +2 -0
- llama_cloud/resources/alpha/__init__.py +2 -0
- llama_cloud/resources/alpha/client.py +112 -0
- llama_cloud/resources/classifier/client.py +109 -5
- llama_cloud/resources/data_sources/types/data_source_update_component.py +2 -0
- llama_cloud/resources/jobs/client.py +4 -4
- llama_cloud/types/__init__.py +6 -2
- llama_cloud/types/agent_deployment_summary.py +1 -0
- llama_cloud/types/classify_job.py +2 -0
- llama_cloud/types/cloud_jira_data_source_v_2.py +52 -0
- llama_cloud/types/cloud_jira_data_source_v_2_api_version.py +21 -0
- llama_cloud/types/configurable_data_source_names.py +4 -0
- llama_cloud/types/data_source_component.py +2 -0
- llama_cloud/types/data_source_create_component.py +2 -0
- llama_cloud/types/data_source_reader_version_metadata_reader_version.py +9 -1
- llama_cloud/types/file.py +1 -1
- llama_cloud/types/{classify_job_with_status.py → paginated_response_classify_job.py} +5 -18
- llama_cloud/types/pipeline_data_source_component.py +2 -0
- llama_cloud/types/pipeline_file.py +4 -4
- llama_cloud/types/usage_response_active_alerts_item.py +4 -0
- {llama_cloud-0.1.37.dist-info → llama_cloud-0.1.39.dist-info}/METADATA +2 -3
- {llama_cloud-0.1.37.dist-info → llama_cloud-0.1.39.dist-info}/RECORD +26 -22
- {llama_cloud-0.1.37.dist-info → llama_cloud-0.1.39.dist-info}/WHEEL +1 -1
- {llama_cloud-0.1.37.dist-info → llama_cloud-0.1.39.dist-info}/LICENSE +0 -0
llama_cloud/__init__.py
CHANGED
@@ -41,7 +41,6 @@ from .types import (
     ClassifierRule,
     ClassifyJob,
     ClassifyJobResults,
-    ClassifyJobWithStatus,
     ClassifyParsingConfiguration,
     CloudAstraDbVectorStore,
     CloudAzStorageBlobDataSource,
@@ -51,6 +50,8 @@ from .types import (
     CloudDocument,
     CloudDocumentCreate,
     CloudJiraDataSource,
+    CloudJiraDataSourceV2,
+    CloudJiraDataSourceV2ApiVersion,
     CloudMilvusVectorStore,
     CloudMongoDbAtlasVectorSearch,
     CloudNotionPageDataSource,
@@ -235,6 +236,7 @@ from .types import (
     PaginatedReportResponse,
     PaginatedResponseAgentData,
     PaginatedResponseAggregateGroup,
+    PaginatedResponseClassifyJob,
     PaginatedResponseQuotaConfiguration,
     ParseJobConfig,
     ParseJobConfigPriority,
@@ -413,6 +415,7 @@ from .resources import (
     UpdateReportPlanApiV1ReportsReportIdPlanPatchRequestAction,
     admin,
     agent_deployments,
+    alpha,
     beta,
     chat_apps,
     classifier,
@@ -474,7 +477,6 @@ __all__ = [
     "ClassifierRule",
     "ClassifyJob",
     "ClassifyJobResults",
-    "ClassifyJobWithStatus",
     "ClassifyParsingConfiguration",
     "CloudAstraDbVectorStore",
     "CloudAzStorageBlobDataSource",
@@ -484,6 +486,8 @@ __all__ = [
     "CloudDocument",
     "CloudDocumentCreate",
     "CloudJiraDataSource",
+    "CloudJiraDataSourceV2",
+    "CloudJiraDataSourceV2ApiVersion",
     "CloudMilvusVectorStore",
     "CloudMongoDbAtlasVectorSearch",
     "CloudNotionPageDataSource",
@@ -691,6 +695,7 @@ __all__ = [
     "PaginatedReportResponse",
     "PaginatedResponseAgentData",
     "PaginatedResponseAggregateGroup",
+    "PaginatedResponseClassifyJob",
     "PaginatedResponseQuotaConfiguration",
     "ParseJobConfig",
     "ParseJobConfigPriority",
@@ -845,6 +850,7 @@ __all__ = [
    "WebhookConfigurationWebhookEventsItem",
    "admin",
    "agent_deployments",
+    "alpha",
    "beta",
    "chat_apps",
    "classifier",
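The new Jira V2 data source models and the classify-job pagination model are now exported from the package root, while ClassifyJobWithStatus is removed. A minimal import sketch against 0.1.39:

from llama_cloud import (
    CloudJiraDataSourceV2,
    CloudJiraDataSourceV2ApiVersion,
    PaginatedResponseClassifyJob,
)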
llama_cloud/client.py
CHANGED
@@ -8,6 +8,7 @@ from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
 from .environment import LlamaCloudEnvironment
 from .resources.admin.client import AdminClient, AsyncAdminClient
 from .resources.agent_deployments.client import AgentDeploymentsClient, AsyncAgentDeploymentsClient
+from .resources.alpha.client import AlphaClient, AsyncAlphaClient
 from .resources.beta.client import AsyncBetaClient, BetaClient
 from .resources.chat_apps.client import AsyncChatAppsClient, ChatAppsClient
 from .resources.classifier.client import AsyncClassifierClient, ClassifierClient
@@ -61,6 +62,7 @@ class LlamaCloud:
         self.llama_extract = LlamaExtractClient(client_wrapper=self._client_wrapper)
         self.reports = ReportsClient(client_wrapper=self._client_wrapper)
         self.beta = BetaClient(client_wrapper=self._client_wrapper)
+        self.alpha = AlphaClient(client_wrapper=self._client_wrapper)


 class AsyncLlamaCloud:
@@ -97,6 +99,7 @@ class AsyncLlamaCloud:
         self.llama_extract = AsyncLlamaExtractClient(client_wrapper=self._client_wrapper)
         self.reports = AsyncReportsClient(client_wrapper=self._client_wrapper)
         self.beta = AsyncBetaClient(client_wrapper=self._client_wrapper)
+        self.alpha = AsyncAlphaClient(client_wrapper=self._client_wrapper)


 def _get_base_url(*, base_url: typing.Optional[str] = None, environment: LlamaCloudEnvironment) -> str:
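Both the sync and async clients now expose the alpha resource group as an attribute, mirroring beta. A minimal sketch; the token value is a placeholder, following the docstring examples used throughout this SDK:

from llama_cloud.client import AsyncLlamaCloud, LlamaCloud

client = LlamaCloud(token="YOUR_TOKEN")
alpha = client.alpha                 # AlphaClient, new in 0.1.39

async_client = AsyncLlamaCloud(token="YOUR_TOKEN")
async_alpha = async_client.alpha     # AsyncAlphaClient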
llama_cloud/resources/__init__.py
CHANGED
@@ -3,6 +3,7 @@
 from . import (
     admin,
     agent_deployments,
+    alpha,
     beta,
     chat_apps,
     classifier,
@@ -98,6 +99,7 @@ __all__ = [
    "UpdateReportPlanApiV1ReportsReportIdPlanPatchRequestAction",
    "admin",
    "agent_deployments",
+    "alpha",
    "beta",
    "chat_apps",
    "classifier",
llama_cloud/resources/alpha/client.py
ADDED
@@ -0,0 +1,112 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+import urllib.parse
+from json.decoder import JSONDecodeError
+
+from ...core.api_error import ApiError
+from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
+from ...core.jsonable_encoder import jsonable_encoder
+from ...core.remove_none_from_dict import remove_none_from_dict
+from ...errors.unprocessable_entity_error import UnprocessableEntityError
+from ...types.http_validation_error import HttpValidationError
+from ...types.parsing_job import ParsingJob
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic # type: ignore
+except ImportError:
+    import pydantic # type: ignore
+
+# this is used as the default value for optional parameters
+OMIT = typing.cast(typing.Any, ...)
+
+
+class AlphaClient:
+    def __init__(self, *, client_wrapper: SyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    def upload_file_v_2(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        configuration: str,
+        file: typing.Optional[str] = OMIT,
+    ) -> ParsingJob:
+        """
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
+
+            - configuration: str.
+
+            - file: typing.Optional[str].
+        """
+        _request: typing.Dict[str, typing.Any] = {"configuration": configuration}
+        if file is not OMIT:
+            _request["file"] = file
+        _response = self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v2alpha1/parse/upload"),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(ParsingJob, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+
+class AsyncAlphaClient:
+    def __init__(self, *, client_wrapper: AsyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    async def upload_file_v_2(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        configuration: str,
+        file: typing.Optional[str] = OMIT,
+    ) -> ParsingJob:
+        """
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
+
+            - configuration: str.
+
+            - file: typing.Optional[str].
+        """
+        _request: typing.Dict[str, typing.Any] = {"configuration": configuration}
+        if file is not OMIT:
+            _request["file"] = file
+        _response = await self._client_wrapper.httpx_client.request(
+            "POST",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v2alpha1/parse/upload"),
+            params=remove_none_from_dict({"project_id": project_id, "organization_id": organization_id}),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(ParsingJob, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
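The new alpha client posts a JSON body to api/v2alpha1/parse/upload and parses the response as a ParsingJob. A hedged usage sketch; the concrete values for configuration and file below are assumptions, since the diff only shows that both are plain strings and that file is optional:

from llama_cloud.client import LlamaCloud

client = LlamaCloud(token="YOUR_TOKEN")
job = client.alpha.upload_file_v_2(
    configuration='{"some_option": "value"}',  # assumption: a serialized configuration string
    file="file_abc123",                        # assumption: a file reference; may be omitted
)
print(job)  # ParsingJob parsed from the 2xx response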
llama_cloud/resources/classifier/client.py
CHANGED
@@ -12,9 +12,9 @@ from ...errors.unprocessable_entity_error import UnprocessableEntityError
 from ...types.classifier_rule import ClassifierRule
 from ...types.classify_job import ClassifyJob
 from ...types.classify_job_results import ClassifyJobResults
-from ...types.classify_job_with_status import ClassifyJobWithStatus
 from ...types.classify_parsing_configuration import ClassifyParsingConfiguration
 from ...types.http_validation_error import HttpValidationError
+from ...types.paginated_response_classify_job import PaginatedResponseClassifyJob

 try:
     import pydantic
@@ -32,6 +32,58 @@ class ClassifierClient:
     def __init__(self, *, client_wrapper: SyncClientWrapper):
         self._client_wrapper = client_wrapper

+    def list_classify_jobs(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        page_size: typing.Optional[int] = None,
+        page_token: typing.Optional[str] = None,
+    ) -> PaginatedResponseClassifyJob:
+        """
+        List classify jobs.
+        Experimental: This endpoint is not yet ready for production use and is subject to change at any time.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
+
+            - page_size: typing.Optional[int].
+
+            - page_token: typing.Optional[str].
+        ---
+        from llama_cloud.client import LlamaCloud
+
+        client = LlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        client.classifier.list_classify_jobs()
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/classifier/jobs"),
+            params=remove_none_from_dict(
+                {
+                    "project_id": project_id,
+                    "organization_id": organization_id,
+                    "page_size": page_size,
+                    "page_token": page_token,
+                }
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(PaginatedResponseClassifyJob, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
     def create_classify_job(
         self,
         *,
@@ -97,7 +149,7 @@ class ClassifierClient:
         *,
         project_id: typing.Optional[str] = None,
         organization_id: typing.Optional[str] = None,
-    ) ->
+    ) -> ClassifyJob:
         """
         Get a classify job.
         Experimental: This endpoint is not yet ready for production use and is subject to change at any time.
@@ -128,7 +180,7 @@ class ClassifierClient:
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
-            return pydantic.parse_obj_as(
+            return pydantic.parse_obj_as(ClassifyJob, _response.json()) # type: ignore
         if _response.status_code == 422:
             raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
         try:
@@ -188,6 +240,58 @@ class AsyncClassifierClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
         self._client_wrapper = client_wrapper

+    async def list_classify_jobs(
+        self,
+        *,
+        project_id: typing.Optional[str] = None,
+        organization_id: typing.Optional[str] = None,
+        page_size: typing.Optional[int] = None,
+        page_token: typing.Optional[str] = None,
+    ) -> PaginatedResponseClassifyJob:
+        """
+        List classify jobs.
+        Experimental: This endpoint is not yet ready for production use and is subject to change at any time.
+
+        Parameters:
+            - project_id: typing.Optional[str].
+
+            - organization_id: typing.Optional[str].
+
+            - page_size: typing.Optional[int].
+
+            - page_token: typing.Optional[str].
+        ---
+        from llama_cloud.client import AsyncLlamaCloud
+
+        client = AsyncLlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        await client.classifier.list_classify_jobs()
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/classifier/jobs"),
+            params=remove_none_from_dict(
+                {
+                    "project_id": project_id,
+                    "organization_id": organization_id,
+                    "page_size": page_size,
+                    "page_token": page_token,
+                }
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(PaginatedResponseClassifyJob, _response.json()) # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
     async def create_classify_job(
         self,
         *,
@@ -253,7 +357,7 @@ class AsyncClassifierClient:
         *,
         project_id: typing.Optional[str] = None,
         organization_id: typing.Optional[str] = None,
-    ) ->
+    ) -> ClassifyJob:
         """
         Get a classify job.
         Experimental: This endpoint is not yet ready for production use and is subject to change at any time.
@@ -284,7 +388,7 @@ class AsyncClassifierClient:
             timeout=60,
         )
         if 200 <= _response.status_code < 300:
-            return pydantic.parse_obj_as(
+            return pydantic.parse_obj_as(ClassifyJob, _response.json()) # type: ignore
         if _response.status_code == 422:
             raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
         try:
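The new list_classify_jobs call is marked experimental in its own docstring and returns a PaginatedResponseClassifyJob. A minimal sketch of calling it:

from llama_cloud.client import LlamaCloud

client = LlamaCloud(token="YOUR_TOKEN")
page = client.classifier.list_classify_jobs(page_size=20)  # page_size is optional
for job in page.items:
    print(job.project_id, job.status)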
llama_cloud/resources/data_sources/types/data_source_update_component.py
CHANGED
@@ -6,6 +6,7 @@ from ....types.cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
 from ....types.cloud_box_data_source import CloudBoxDataSource
 from ....types.cloud_confluence_data_source import CloudConfluenceDataSource
 from ....types.cloud_jira_data_source import CloudJiraDataSource
+from ....types.cloud_jira_data_source_v_2 import CloudJiraDataSourceV2
 from ....types.cloud_notion_page_data_source import CloudNotionPageDataSource
 from ....types.cloud_one_drive_data_source import CloudOneDriveDataSource
 from ....types.cloud_s_3_data_source import CloudS3DataSource
@@ -22,5 +23,6 @@ DataSourceUpdateComponent = typing.Union[
     CloudNotionPageDataSource,
     CloudConfluenceDataSource,
     CloudJiraDataSource,
+    CloudJiraDataSourceV2,
     CloudBoxDataSource,
 ]
llama_cloud/resources/jobs/client.py
CHANGED
@@ -24,7 +24,7 @@ class JobsClient:
     def __init__(self, *, client_wrapper: SyncClientWrapper):
         self._client_wrapper = client_wrapper

-    def
+    def get_jobs(
         self,
         *,
         job_name: typing.Optional[str] = None,
@@ -62,7 +62,7 @@ class JobsClient:
         client = LlamaCloud(
             token="YOUR_TOKEN",
         )
-        client.jobs.
+        client.jobs.get_jobs()
         """
         _response = self._client_wrapper.httpx_client.request(
             "GET",
@@ -96,7 +96,7 @@ class AsyncJobsClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
         self._client_wrapper = client_wrapper

-    async def
+    async def get_jobs(
         self,
         *,
         job_name: typing.Optional[str] = None,
@@ -134,7 +134,7 @@ class AsyncJobsClient:
         client = AsyncLlamaCloud(
             token="YOUR_TOKEN",
         )
-        await client.jobs.
+        await client.jobs.get_jobs()
         """
         _response = await self._client_wrapper.httpx_client.request(
             "GET",
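With the method now rendered as get_jobs (the previous name is truncated in this diff), the jobs resource can be called as in its docstring example; job_name is an optional filter:

from llama_cloud.client import LlamaCloud

client = LlamaCloud(token="YOUR_TOKEN")
client.jobs.get_jobs()
client.jobs.get_jobs(job_name="my-job")  # "my-job" is a placeholder value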
llama_cloud/types/__init__.py
CHANGED
@@ -44,7 +44,6 @@ from .classification_result import ClassificationResult
 from .classifier_rule import ClassifierRule
 from .classify_job import ClassifyJob
 from .classify_job_results import ClassifyJobResults
-from .classify_job_with_status import ClassifyJobWithStatus
 from .classify_parsing_configuration import ClassifyParsingConfiguration
 from .cloud_astra_db_vector_store import CloudAstraDbVectorStore
 from .cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
@@ -54,6 +53,8 @@ from .cloud_confluence_data_source import CloudConfluenceDataSource
 from .cloud_document import CloudDocument
 from .cloud_document_create import CloudDocumentCreate
 from .cloud_jira_data_source import CloudJiraDataSource
+from .cloud_jira_data_source_v_2 import CloudJiraDataSourceV2
+from .cloud_jira_data_source_v_2_api_version import CloudJiraDataSourceV2ApiVersion
 from .cloud_milvus_vector_store import CloudMilvusVectorStore
 from .cloud_mongo_db_atlas_vector_search import CloudMongoDbAtlasVectorSearch
 from .cloud_notion_page_data_source import CloudNotionPageDataSource
@@ -246,6 +247,7 @@ from .paginated_list_pipeline_files_response import PaginatedListPipelineFilesResponse
 from .paginated_report_response import PaginatedReportResponse
 from .paginated_response_agent_data import PaginatedResponseAgentData
 from .paginated_response_aggregate_group import PaginatedResponseAggregateGroup
+from .paginated_response_classify_job import PaginatedResponseClassifyJob
 from .paginated_response_quota_configuration import PaginatedResponseQuotaConfiguration
 from .parse_job_config import ParseJobConfig
 from .parse_job_config_priority import ParseJobConfigPriority
@@ -437,7 +439,6 @@ __all__ = [
    "ClassifierRule",
    "ClassifyJob",
    "ClassifyJobResults",
-    "ClassifyJobWithStatus",
    "ClassifyParsingConfiguration",
    "CloudAstraDbVectorStore",
    "CloudAzStorageBlobDataSource",
@@ -447,6 +448,8 @@ __all__ = [
    "CloudDocument",
    "CloudDocumentCreate",
    "CloudJiraDataSource",
+    "CloudJiraDataSourceV2",
+    "CloudJiraDataSourceV2ApiVersion",
    "CloudMilvusVectorStore",
    "CloudMongoDbAtlasVectorSearch",
    "CloudNotionPageDataSource",
@@ -631,6 +634,7 @@ __all__ = [
    "PaginatedReportResponse",
    "PaginatedResponseAgentData",
    "PaginatedResponseAggregateGroup",
+    "PaginatedResponseClassifyJob",
    "PaginatedResponseQuotaConfiguration",
    "ParseJobConfig",
    "ParseJobConfigPriority",
llama_cloud/types/agent_deployment_summary.py
CHANGED
@@ -23,6 +23,7 @@ class AgentDeploymentSummary(pydantic.BaseModel):
     display_name: str = pydantic.Field(description="Display name of the deployed app")
     created_at: dt.datetime = pydantic.Field(description="Timestamp when the app deployment was created")
     updated_at: dt.datetime = pydantic.Field(description="Timestamp when the app deployment was last updated")
+    api_key_id: typing.Optional[str]

     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
llama_cloud/types/classify_job.py
CHANGED
@@ -6,6 +6,7 @@ import typing
 from ..core.datetime_utils import serialize_datetime
 from .classifier_rule import ClassifierRule
 from .classify_parsing_configuration import ClassifyParsingConfiguration
+from .status_enum import StatusEnum

 try:
     import pydantic
@@ -27,6 +28,7 @@ class ClassifyJob(pydantic.BaseModel):
     rules: typing.List[ClassifierRule] = pydantic.Field(description="The rules to classify the files")
     user_id: str = pydantic.Field(description="The ID of the user")
     project_id: str = pydantic.Field(description="The ID of the project")
+    status: StatusEnum = pydantic.Field(description="The status of the classify job")
     parsing_configuration: typing.Optional[ClassifyParsingConfiguration] = pydantic.Field(
         description="The configuration for the parsing job"
     )
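ClassifyJob now carries the status field that previously lived on ClassifyJobWithStatus, typed as StatusEnum. A small sketch of reading it; the specific enum members are not shown in this diff, so the comparison below is an assumption:

from llama_cloud.client import LlamaCloud
from llama_cloud.types.status_enum import StatusEnum

client = LlamaCloud(token="YOUR_TOKEN")
page = client.classifier.list_classify_jobs()
for job in page.items:
    print(job.status)                        # a StatusEnum value
    done = job.status == StatusEnum.SUCCESS  # assumption: SUCCESS is a StatusEnum member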
llama_cloud/types/cloud_jira_data_source_v_2.py
ADDED
@@ -0,0 +1,52 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from .cloud_jira_data_source_v_2_api_version import CloudJiraDataSourceV2ApiVersion
+
+try:
+    import pydantic
+    if pydantic.__version__.startswith("1."):
+        raise ImportError
+    import pydantic.v1 as pydantic # type: ignore
+except ImportError:
+    import pydantic # type: ignore
+
+
+class CloudJiraDataSourceV2(pydantic.BaseModel):
+    """
+    Cloud Jira Data Source integrating JiraReaderV2.
+    """
+
+    supports_access_control: typing.Optional[bool]
+    email: typing.Optional[str]
+    api_token: typing.Optional[str]
+    server_url: str = pydantic.Field(description="The server url for Jira Cloud.")
+    cloud_id: typing.Optional[str]
+    authentication_mechanism: str = pydantic.Field(description="Type of Authentication for connecting to Jira APIs.")
+    api_version: typing.Optional[CloudJiraDataSourceV2ApiVersion] = pydantic.Field(
+        description="Jira REST API version to use (2 or 3). 3 supports Atlassian Document Format (ADF)."
+    )
+    query: str = pydantic.Field(description="JQL (Jira Query Language) query to search.")
+    fields: typing.Optional[typing.List[str]]
+    expand: typing.Optional[str]
+    requests_per_minute: typing.Optional[int]
+    get_permissions: typing.Optional[bool] = pydantic.Field(
+        description="Whether to fetch project role permissions and issue-level security"
+    )
+    class_name: typing.Optional[str]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
llama_cloud/types/cloud_jira_data_source_v_2_api_version.py
ADDED
@@ -0,0 +1,21 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class CloudJiraDataSourceV2ApiVersion(str, enum.Enum):
+    """
+    Jira REST API version to use (2 or 3). 3 supports Atlassian Document Format (ADF).
+    """
+
+    TWO = "2"
+    THREE = "3"
+
+    def visit(self, two: typing.Callable[[], T_Result], three: typing.Callable[[], T_Result]) -> T_Result:
+        if self is CloudJiraDataSourceV2ApiVersion.TWO:
+            return two()
+        if self is CloudJiraDataSourceV2ApiVersion.THREE:
+            return three()
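Per the new model, server_url, authentication_mechanism, and query are required, and api_version takes the new enum. A hedged construction sketch; all field values below are placeholders, and the accepted authentication_mechanism strings are not listed in this diff:

from llama_cloud import CloudJiraDataSourceV2, CloudJiraDataSourceV2ApiVersion

source = CloudJiraDataSourceV2(
    server_url="https://your-domain.atlassian.net",      # placeholder
    authentication_mechanism="basic",                    # assumption: accepted values not documented here
    query="project = DOCS ORDER BY created DESC",        # any JQL query
    api_version=CloudJiraDataSourceV2ApiVersion.THREE,   # "3" supports Atlassian Document Format
    requests_per_minute=60,                              # optional rate limit
)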
llama_cloud/types/configurable_data_source_names.py
CHANGED
@@ -16,6 +16,7 @@ class ConfigurableDataSourceNames(str, enum.Enum):
     NOTION_PAGE = "NOTION_PAGE"
     CONFLUENCE = "CONFLUENCE"
     JIRA = "JIRA"
+    JIRA_V_2 = "JIRA_V2"
     BOX = "BOX"

     def visit(
@@ -29,6 +30,7 @@ class ConfigurableDataSourceNames(str, enum.Enum):
         notion_page: typing.Callable[[], T_Result],
         confluence: typing.Callable[[], T_Result],
         jira: typing.Callable[[], T_Result],
+        jira_v_2: typing.Callable[[], T_Result],
         box: typing.Callable[[], T_Result],
     ) -> T_Result:
         if self is ConfigurableDataSourceNames.S_3:
@@ -49,5 +51,7 @@ class ConfigurableDataSourceNames(str, enum.Enum):
             return confluence()
         if self is ConfigurableDataSourceNames.JIRA:
             return jira()
+        if self is ConfigurableDataSourceNames.JIRA_V_2:
+            return jira_v_2()
         if self is ConfigurableDataSourceNames.BOX:
             return box()
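The enum gains a JIRA_V_2 member whose wire value is "JIRA_V2", and visit() now requires a matching jira_v_2 handler. A tiny check of the new member:

from llama_cloud.types.configurable_data_source_names import ConfigurableDataSourceNames

assert ConfigurableDataSourceNames.JIRA_V_2.value == "JIRA_V2"
assert ConfigurableDataSourceNames("JIRA_V2") is ConfigurableDataSourceNames.JIRA_V_2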
llama_cloud/types/data_source_component.py
CHANGED
@@ -6,6 +6,7 @@ from .cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
 from .cloud_box_data_source import CloudBoxDataSource
 from .cloud_confluence_data_source import CloudConfluenceDataSource
 from .cloud_jira_data_source import CloudJiraDataSource
+from .cloud_jira_data_source_v_2 import CloudJiraDataSourceV2
 from .cloud_notion_page_data_source import CloudNotionPageDataSource
 from .cloud_one_drive_data_source import CloudOneDriveDataSource
 from .cloud_s_3_data_source import CloudS3DataSource
@@ -22,5 +23,6 @@ DataSourceComponent = typing.Union[
     CloudNotionPageDataSource,
     CloudConfluenceDataSource,
     CloudJiraDataSource,
+    CloudJiraDataSourceV2,
     CloudBoxDataSource,
 ]
llama_cloud/types/data_source_create_component.py
CHANGED
@@ -6,6 +6,7 @@ from .cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
 from .cloud_box_data_source import CloudBoxDataSource
 from .cloud_confluence_data_source import CloudConfluenceDataSource
 from .cloud_jira_data_source import CloudJiraDataSource
+from .cloud_jira_data_source_v_2 import CloudJiraDataSourceV2
 from .cloud_notion_page_data_source import CloudNotionPageDataSource
 from .cloud_one_drive_data_source import CloudOneDriveDataSource
 from .cloud_s_3_data_source import CloudS3DataSource
@@ -22,5 +23,6 @@ DataSourceCreateComponent = typing.Union[
     CloudNotionPageDataSource,
     CloudConfluenceDataSource,
     CloudJiraDataSource,
+    CloudJiraDataSourceV2,
     CloudBoxDataSource,
 ]
llama_cloud/types/data_source_reader_version_metadata_reader_version.py
CHANGED
@@ -9,9 +9,17 @@ T_Result = typing.TypeVar("T_Result")
 class DataSourceReaderVersionMetadataReaderVersion(str, enum.Enum):
     ONE_0 = "1.0"
     TWO_0 = "2.0"
+    TWO_1 = "2.1"

-    def visit(
+    def visit(
+        self,
+        one_0: typing.Callable[[], T_Result],
+        two_0: typing.Callable[[], T_Result],
+        two_1: typing.Callable[[], T_Result],
+    ) -> T_Result:
         if self is DataSourceReaderVersionMetadataReaderVersion.ONE_0:
             return one_0()
         if self is DataSourceReaderVersionMetadataReaderVersion.TWO_0:
             return two_0()
+        if self is DataSourceReaderVersionMetadataReaderVersion.TWO_1:
+            return two_1()
llama_cloud/types/file.py
CHANGED
@@ -25,7 +25,7 @@ class File(pydantic.BaseModel):
     created_at: typing.Optional[dt.datetime]
     updated_at: typing.Optional[dt.datetime]
     name: str
-    external_file_id: str
+    external_file_id: typing.Optional[str]
     file_size: typing.Optional[int]
     file_type: typing.Optional[str]
     project_id: str = pydantic.Field(description="The ID of the project that the file belongs to")
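Because external_file_id is now Optional[str], code that previously assumed a string should guard for None. A small sketch:

from llama_cloud.types.file import File

def file_label(f: File) -> str:
    # external_file_id may be None as of 0.1.39; fall back to the required name field.
    return f.external_file_id or f.name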
llama_cloud/types/classify_job_with_status.py → llama_cloud/types/paginated_response_classify_job.py
RENAMED
@@ -4,9 +4,7 @@ import datetime as dt
 import typing

 from ..core.datetime_utils import serialize_datetime
-from .
-from .classify_parsing_configuration import ClassifyParsingConfiguration
-from .status_enum import StatusEnum
+from .classify_job import ClassifyJob

 try:
     import pydantic
@@ -17,21 +15,10 @@ except ImportError:
     import pydantic # type: ignore


-class
-""
-
-
-
-    id: str = pydantic.Field(description="Unique identifier")
-    created_at: typing.Optional[dt.datetime]
-    updated_at: typing.Optional[dt.datetime]
-    rules: typing.List[ClassifierRule] = pydantic.Field(description="The rules to classify the files")
-    user_id: str = pydantic.Field(description="The ID of the user")
-    project_id: str = pydantic.Field(description="The ID of the project")
-    parsing_configuration: typing.Optional[ClassifyParsingConfiguration] = pydantic.Field(
-        description="The configuration for the parsing job"
-    )
-    status: StatusEnum = pydantic.Field(description="The status of the classify job")
+class PaginatedResponseClassifyJob(pydantic.BaseModel):
+    items: typing.List[ClassifyJob] = pydantic.Field(description="The list of items.")
+    next_page_token: typing.Optional[str]
+    total_size: typing.Optional[int]

     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
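PaginatedResponseClassifyJob pairs items with next_page_token, which feeds back into list_classify_jobs. A pagination sketch, assuming an empty or missing next_page_token marks the last page:

from llama_cloud.client import LlamaCloud

client = LlamaCloud(token="YOUR_TOKEN")
page_token = None
while True:
    page = client.classifier.list_classify_jobs(page_size=50, page_token=page_token)
    for job in page.items:
        print(job.project_id, job.status)
    page_token = page.next_page_token
    if not page_token:
        break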
llama_cloud/types/pipeline_data_source_component.py
CHANGED
@@ -6,6 +6,7 @@ from .cloud_az_storage_blob_data_source import CloudAzStorageBlobDataSource
 from .cloud_box_data_source import CloudBoxDataSource
 from .cloud_confluence_data_source import CloudConfluenceDataSource
 from .cloud_jira_data_source import CloudJiraDataSource
+from .cloud_jira_data_source_v_2 import CloudJiraDataSourceV2
 from .cloud_notion_page_data_source import CloudNotionPageDataSource
 from .cloud_one_drive_data_source import CloudOneDriveDataSource
 from .cloud_s_3_data_source import CloudS3DataSource
@@ -22,5 +23,6 @@ PipelineDataSourceComponent = typing.Union[
     CloudNotionPageDataSource,
     CloudConfluenceDataSource,
     CloudJiraDataSource,
+    CloudJiraDataSourceV2,
     CloudBoxDataSource,
 ]
llama_cloud/types/pipeline_file.py
CHANGED
@@ -31,14 +31,14 @@ class PipelineFile(pydantic.BaseModel):
     external_file_id: typing.Optional[str]
     file_size: typing.Optional[int]
     file_type: typing.Optional[str]
-    project_id: str
+    project_id: typing.Optional[str]
     last_modified_at: typing.Optional[dt.datetime]
-    resource_info: typing.Optional[typing.Dict[str, typing.Optional[PipelineFileResourceInfoValue]]]
-    permission_info: typing.Optional[typing.Dict[str, typing.Optional[PipelineFilePermissionInfoValue]]]
-    data_source_id: typing.Optional[str]
     file_id: typing.Optional[str]
     pipeline_id: str = pydantic.Field(description="The ID of the pipeline that the file is associated with")
+    resource_info: typing.Optional[typing.Dict[str, typing.Optional[PipelineFileResourceInfoValue]]]
+    permission_info: typing.Optional[typing.Dict[str, typing.Optional[PipelineFilePermissionInfoValue]]]
     custom_metadata: typing.Optional[typing.Dict[str, typing.Optional[PipelineFileCustomMetadataValue]]]
+    data_source_id: typing.Optional[str]
     config_hash: typing.Optional[typing.Dict[str, typing.Optional[PipelineFileConfigHashValue]]]
     indexed_page_count: typing.Optional[int]
     status: typing.Optional[PipelineFileStatus]
llama_cloud/types/usage_response_active_alerts_item.py
CHANGED
@@ -12,6 +12,7 @@ class UsageResponseActiveAlertsItem(str, enum.Enum):
     CONFIGURED_SPEND_LIMIT_EXCEEDED = "configured_spend_limit_exceeded"
     FREE_CREDITS_EXHAUSTED = "free_credits_exhausted"
     INTERNAL_SPENDING_ALERT = "internal_spending_alert"
+    HAS_SPENDING_ALERT = "has_spending_alert"

     def visit(
         self,
@@ -20,6 +21,7 @@ class UsageResponseActiveAlertsItem(str, enum.Enum):
         configured_spend_limit_exceeded: typing.Callable[[], T_Result],
         free_credits_exhausted: typing.Callable[[], T_Result],
         internal_spending_alert: typing.Callable[[], T_Result],
+        has_spending_alert: typing.Callable[[], T_Result],
     ) -> T_Result:
         if self is UsageResponseActiveAlertsItem.PLAN_SPEND_LIMIT_EXCEEDED:
             return plan_spend_limit_exceeded()
@@ -31,3 +33,5 @@ class UsageResponseActiveAlertsItem(str, enum.Enum):
             return free_credits_exhausted()
         if self is UsageResponseActiveAlertsItem.INTERNAL_SPENDING_ALERT:
             return internal_spending_alert()
+        if self is UsageResponseActiveAlertsItem.HAS_SPENDING_ALERT:
+            return has_spending_alert()
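The alerts enum gains HAS_SPENDING_ALERT. A small helper for checking a list of alert items; the list itself would come from a usage response, whose field name is not shown in this diff:

import typing

from llama_cloud.types.usage_response_active_alerts_item import UsageResponseActiveAlertsItem

def spending_alert_active(alerts: typing.List[UsageResponseActiveAlertsItem]) -> bool:
    # Membership test works directly because the enum subclasses str.
    return UsageResponseActiveAlertsItem.HAS_SPENDING_ALERT in alerts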
{llama_cloud-0.1.37.dist-info → llama_cloud-0.1.39.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.1
 Name: llama-cloud
-Version: 0.1.
+Version: 0.1.39
 Summary:
 License: MIT
 Author: Logan Markewich
@@ -13,7 +13,6 @@ Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
-Classifier: Programming Language :: Python :: 3.13
 Requires-Dist: certifi (>=2024.7.4)
 Requires-Dist: httpx (>=0.20.0)
 Requires-Dist: pydantic (>=1.10)
{llama_cloud-0.1.37.dist-info → llama_cloud-0.1.39.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
-llama_cloud/__init__.py,sha256=
-llama_cloud/client.py,sha256=
+llama_cloud/__init__.py,sha256=WQVQipnD9bxfJRVkSNYKy_S3Mf8Z1COj5HL__5a9Wz0,27857
+llama_cloud/client.py,sha256=u8yj_cznQCKssfheWFugUUUtsM8oVrlWbOyQBFlq5zA,6610
 llama_cloud/core/__init__.py,sha256=QJS3CJ2TYP2E1Tge0CS6Z7r8LTNzJHQVX1hD3558eP0,519
 llama_cloud/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
 llama_cloud/core/client_wrapper.py,sha256=xmj0jCdQ0ySzbSqHUWOkpRRy069y74I_HuXkWltcsVM,1507
@@ -9,17 +9,19 @@ llama_cloud/core/remove_none_from_dict.py,sha256=8m91FC3YuVem0Gm9_sXhJ2tGvP33owJ
 llama_cloud/environment.py,sha256=feTjOebeFZMrBdnHat4RE5aHlpt-sJm4NhK4ntV1htI,167
 llama_cloud/errors/__init__.py,sha256=pbbVUFtB9LCocA1RMWMMF_RKjsy5YkOKX5BAuE49w6g,170
 llama_cloud/errors/unprocessable_entity_error.py,sha256=FvR7XPlV3Xx5nu8HNlmLhBRdk4so_gCHjYT5PyZe6sM,313
-llama_cloud/resources/__init__.py,sha256=
+llama_cloud/resources/__init__.py,sha256=axi8rRsGi4mjyG88TshNydTRZFYmXqWGwCIya6YIHI0,4321
 llama_cloud/resources/admin/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 llama_cloud/resources/admin/client.py,sha256=YIYy9kU1_xaE0gkpmZZbCgLzZj6XSrAUplS7S2uWmwM,8536
 llama_cloud/resources/agent_deployments/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 llama_cloud/resources/agent_deployments/client.py,sha256=3EOzOjmRs4KISgJ566enq3FCuN3YtskjO0OHqQGtkQ0,6122
+llama_cloud/resources/alpha/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
+llama_cloud/resources/alpha/client.py,sha256=d5cRIUykNpnVryuxWBPUpmo-2L1vMIDeZIF3DvTIx7E,4322
 llama_cloud/resources/beta/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 llama_cloud/resources/beta/client.py,sha256=_GNkHQxyZxhZOkLIRzfCw6PexQx-E8r_7R-3Wd9Y0uE,63128
 llama_cloud/resources/chat_apps/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 llama_cloud/resources/chat_apps/client.py,sha256=orSI8rpQbUwVEToolEeiEi5Qe--suXFvfu6D9JDii5I,23595
 llama_cloud/resources/classifier/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-llama_cloud/resources/classifier/client.py,sha256=
+llama_cloud/resources/classifier/client.py,sha256=Q1vdRTSe-QnZyFucTlxHIRmdCGa-PFf7XUazik7s59E,17618
 llama_cloud/resources/data_sinks/__init__.py,sha256=ZHUjn3HbKhq_7QS1q74r2m5RGKF5lxcvF2P6pGvpcis,147
 llama_cloud/resources/data_sinks/client.py,sha256=GpD6FhbGqkg2oUToyMG6J8hPxG_iG7W5ZJRo0qg3yzk,20639
 llama_cloud/resources/data_sinks/types/__init__.py,sha256=M1aTcufJwiEZo9B0KmYj9PfkSd6I1ooFt9tpIRGwgg8,168
@@ -27,7 +29,7 @@ llama_cloud/resources/data_sinks/types/data_sink_update_component.py,sha256=ynPd
 llama_cloud/resources/data_sources/__init__.py,sha256=McURkcNBGHXH1hmRDRmZI1dRzJrekCTHZsgv03r2oZI,227
 llama_cloud/resources/data_sources/client.py,sha256=SZFm8bW5nkaXringdSnmxHqvVjKM7cNNOtqVXjgTKhc,21855
 llama_cloud/resources/data_sources/types/__init__.py,sha256=Cd5xEECTzXqQSfJALfJPSjudlSLeb3RENeJVi8vwPbM,303
-llama_cloud/resources/data_sources/types/data_source_update_component.py,sha256=
+llama_cloud/resources/data_sources/types/data_source_update_component.py,sha256=_jQY6FhcvenWdzi27SK1bSY8muXKLRkXlVrTqEWgKKc,1159
 llama_cloud/resources/data_sources/types/data_source_update_custom_metadata_value.py,sha256=3aFC-p8MSxjhOu2nFtqk0pixj6RqNqcFnbOYngUdZUk,215
 llama_cloud/resources/embedding_model_configs/__init__.py,sha256=cXDtKKq-gj7yjFjdQ5GrGyPs-T5tRV_0JjUMGlAbdUs,1115
 llama_cloud/resources/embedding_model_configs/client.py,sha256=2JDvZJtSger9QJ8luPct-2zvwjaJAR8VcKsTZ1wgYTE,17769
@@ -40,7 +42,7 @@ llama_cloud/resources/files/client.py,sha256=Crd0IR0cV5fld4jUGAHE8VsIbw7vCYrOIyB
 llama_cloud/resources/files/types/__init__.py,sha256=ZZuDQsYsxmQ9VwpfN7oqftzGRnFTR2EMYdCa7zARo4g,204
 llama_cloud/resources/files/types/file_create_from_url_resource_info_value.py,sha256=Wc8wFgujOO5pZvbbh2TMMzpa37GKZd14GYNJ9bdq7BE,214
 llama_cloud/resources/jobs/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-llama_cloud/resources/jobs/client.py,sha256=
+llama_cloud/resources/jobs/client.py,sha256=b2R_Oj2OCtcv-IIJNz9aq42hDgrOk_huqTSJhTB9VaA,6202
 llama_cloud/resources/llama_extract/__init__.py,sha256=V6VZ8hQXwAuvOOZyk43nnbINoDQqEr03AjKQPhYKluk,997
 llama_cloud/resources/llama_extract/client.py,sha256=B_qhVsk-Qs81qrFOVgWqcvelSB3TLWFJCibnn--3BjE,83096
 llama_cloud/resources/llama_extract/types/__init__.py,sha256=2Iu4w5LXZY2Govr1RzahIfY0b84y658SQjMDtj7rH_0,1497
@@ -75,13 +77,13 @@ llama_cloud/resources/retrievers/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-
 llama_cloud/resources/retrievers/client.py,sha256=z2LhmA-cZVFzr9P6loeCZYnJbvSIk0QitFeVFp-IyZk,32126
 llama_cloud/resources/users/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 llama_cloud/resources/users/client.py,sha256=A2s8e2syQHkkSwPz-Lrt_Zxp1K-8nqJqj5EafE6NWYs,5545
-llama_cloud/types/__init__.py,sha256=
+llama_cloud/types/__init__.py,sha256=HSHS4fNorQ2l79VYOegTeh4KG7LnIzpSvUOhTELh3pQ,33920
 llama_cloud/types/advanced_mode_transform_config.py,sha256=4xCXye0_cPmVS1F8aNTx81sIaEPjQH9kiCCAIoqUzlI,1502
 llama_cloud/types/advanced_mode_transform_config_chunking_config.py,sha256=wYbJnWLpeQDfhmDZz-wJfYzD1iGT5Jcxb9ga3mzUuvk,1983
 llama_cloud/types/advanced_mode_transform_config_segmentation_config.py,sha256=anNGq0F5-IlbIW3kpC8OilzLJnUq5tdIcWHnRnmlYsg,1303
 llama_cloud/types/agent_data.py,sha256=Onaoc1QeIn3Il-8r1vgEzqvef92gHclCO7AC4kucEMI,1220
 llama_cloud/types/agent_deployment_list.py,sha256=7PWm2GHumo8CfqKU8fDRTJVDV4QQh8My1dhvBPO2zaA,1120
-llama_cloud/types/agent_deployment_summary.py,sha256=
+llama_cloud/types/agent_deployment_summary.py,sha256=9IKjSwu5uNpGfVyilr2W5d0aMApOdWNbtTk5w8GegEY,1642
 llama_cloud/types/aggregate_group.py,sha256=LybxFl_1snA9VgG6f7sogwO7kYAwH_I88pkYc0oMOH0,1164
 llama_cloud/types/audio_block.py,sha256=9JIGjZ8GU3C7ICv6XdNVN6_gWXyF18TJPaDuM9OUoMU,1071
 llama_cloud/types/auto_transform_config.py,sha256=HVeHZM75DMRznScqLTfrMwcZwIdyWPuaEYbPewnHqwc,1168
@@ -106,9 +108,8 @@ llama_cloud/types/chat_data.py,sha256=ZYqVtjXF6qPGajU4IWZu3InpU54TXJwBFiqxBepylP
 llama_cloud/types/chunk_mode.py,sha256=J4vqAQfQG6PWsIv1Fe_99nVsAfDbv_P81_KVsJ9AkU4,790
 llama_cloud/types/classification_result.py,sha256=1faExxbtJLoYjy0h0Gl38Shk2idySEOenJBjQlcRpXs,1309
 llama_cloud/types/classifier_rule.py,sha256=-64iBABkQ_IXN8rA77xA6L4xSsj8epTVT9Z1C7ypGx0,1533
-llama_cloud/types/classify_job.py,sha256=
+llama_cloud/types/classify_job.py,sha256=adYCoiUbHNUFfr_FalvhULikeGmWp4I0P1vziVYrycM,1776
 llama_cloud/types/classify_job_results.py,sha256=gasxmGX4D1cJuAY0z8Sm7PuZ1TIeEPYzp6VggWeelko,1279
-llama_cloud/types/classify_job_with_status.py,sha256=DwJIfzuaUoLZ0KAA2dGj6DhZ9FtrUFtZRRrkH0BmekM,1798
 llama_cloud/types/classify_parsing_configuration.py,sha256=FMbDCtz9gGu557WRtiKkMsKfzPWuDF-XKW8Z2DeOL9g,1270
 llama_cloud/types/cloud_astra_db_vector_store.py,sha256=uvPZcMk0QISyMee0n2Z6QapCIejvibY94XWn5gmieO8,2065
 llama_cloud/types/cloud_az_storage_blob_data_source.py,sha256=NT4cYsD1M868_bSJxKM9cvTMtjQtQxKloE4vRv8_lwg,1534
@@ -118,6 +119,8 @@ llama_cloud/types/cloud_confluence_data_source.py,sha256=q-bBwkG5L2QZqZdPDlvgzdz
 llama_cloud/types/cloud_document.py,sha256=Rg_H8lcz2TzxEAIdU-m5mGpkM7s0j1Cn4JHkXYddmGs,1255
 llama_cloud/types/cloud_document_create.py,sha256=fQ1gZAtLCpr-a-sPbMez_5fK9JMU3uyp2tNvIzWNG3U,1278
 llama_cloud/types/cloud_jira_data_source.py,sha256=9R20k8Ne0Bl9X5dgSxpM_IGOFmC70Llz0pJ93rAKRvw,1458
+llama_cloud/types/cloud_jira_data_source_v_2.py,sha256=t9P9ljuYRA-KY1umvjnbYdLQ0POV2E9lghTgYNCUQHI,2068
+llama_cloud/types/cloud_jira_data_source_v_2_api_version.py,sha256=Ri2fRbVdTZaZK6bmROw5Hgl4q4FPqq_0IHjfHd263aA,595
 llama_cloud/types/cloud_milvus_vector_store.py,sha256=CHFTJSYPZKYPUU-jpB1MG8OwRvnPiT07o7cYCvQMZLA,1235
 llama_cloud/types/cloud_mongo_db_atlas_vector_search.py,sha256=CQ9euGBd3a72dvpTapRBhakme-fQbY2OaSoe0GDSHDo,1771
 llama_cloud/types/cloud_notion_page_data_source.py,sha256=DxYullFctkpd0A75lfTmPzf-9EjBlusMTtNs3RbmIag,1230
@@ -135,20 +138,20 @@ llama_cloud/types/composite_retrieval_result.py,sha256=EulVseVvpK50kto4wQweLO7jJ
 llama_cloud/types/composite_retrieved_text_node.py,sha256=eTQ99cdZ2PASff5n4oVV1oaNiS9Ie3AtY_E55kBYpBs,1702
 llama_cloud/types/composite_retrieved_text_node_with_score.py,sha256=o-HvmyjqODc68zYuobtj10_62FMBAKRLfRoTHGDdmxw,1148
 llama_cloud/types/configurable_data_sink_names.py,sha256=eGSnwk5yWffBBc0C3Iuh8RlynGTmRC1hqVv0JlUfbNE,1385
-llama_cloud/types/configurable_data_source_names.py,sha256=
+llama_cloud/types/configurable_data_source_names.py,sha256=43Dq5fzqrnJekj3PuxTj7Kb0cXM9TyzHmM7PjXS15Js,2132
 llama_cloud/types/credit_type.py,sha256=nwSRKDWgHk_msdWitctqtyeZwj5EFd6VLto6NF2yCd4,971
 llama_cloud/types/data_sink.py,sha256=PeexYHHoD8WkVp9WsFtfC-AIWszcgeJUprG1bwC8WsQ,1498
 llama_cloud/types/data_sink_component.py,sha256=yNX2YbevUd6RIbaAvkB40ttU0VSx2JBF-eCuLB_Au9Y,843
 llama_cloud/types/data_sink_create.py,sha256=dAaFPCwZ5oX0Fbf7ij62dzSaYnrhj3EHmnLnYnw2KgI,1360
 llama_cloud/types/data_sink_create_component.py,sha256=0LWeqGDeQh3cZm2h5_IrSlFoU5VKmIILaOdE1VtPtfc,849
 llama_cloud/types/data_source.py,sha256=QkJsQBlLt7cX0FxYuNF1w9yZw1BnNcGiQTTfMAuxiEM,1852
-llama_cloud/types/data_source_component.py,sha256=
+llama_cloud/types/data_source_component.py,sha256=5sqs683FAvS_yI7Jt9P3Mp3PPAo-moyLn3fvznUKcr8,1063
 llama_cloud/types/data_source_create.py,sha256=s0bAX_GUwiRdrL-PXS9ROrvq3xpmqbqzdMa6thqL2P4,1581
-llama_cloud/types/data_source_create_component.py,sha256=
+llama_cloud/types/data_source_create_component.py,sha256=6Pvoqs1EjvH4ELO-2qiSCgt7agRhO6st954OiXnnKdI,1069
 llama_cloud/types/data_source_create_custom_metadata_value.py,sha256=ejSsQNbszYQaUWFh9r9kQpHf88qbhuRv1SI9J_MOSC0,215
 llama_cloud/types/data_source_custom_metadata_value.py,sha256=pTZn5yjZYmuOhsLABFJOKZblZUkRqo1CqLAuP5tKji4,209
 llama_cloud/types/data_source_reader_version_metadata.py,sha256=hh7Hunen9GHlvtLb8CM58ZD3V3pTYKX7FgNI7sgZHjM,1157
-llama_cloud/types/data_source_reader_version_metadata_reader_version.py,sha256=
+llama_cloud/types/data_source_reader_version_metadata_reader_version.py,sha256=qZtQtHEnpWE48CjBPdljoYSzuk2rdrw5CCpWbLtM6Ps,735
 llama_cloud/types/data_source_update_dispatcher_config.py,sha256=Sh6HhXfEV2Z6PYhkYQucs2MxyKVpL3UPV-I4cbf--bA,1242
 llama_cloud/types/delete_params.py,sha256=1snPrd3WO9C1bKf0WdMslE2HQMF0yYLI3U7N53cmurM,1285
 llama_cloud/types/document_block.py,sha256=OYKd5M3LgJ0Cz0K0YNuVRoHz9HcUdVuf2Vcqku8fck4,1116
@@ -191,7 +194,7 @@ llama_cloud/types/extract_state.py,sha256=TNeVAXXKZaiM2srlbQlzRSn4_TDpR4xyT_yQhJ
 llama_cloud/types/extract_target.py,sha256=Gt-FNqblzcjdfq1hxsqEjWWu-HNLXdKy4w98nog52Ms,478
 llama_cloud/types/fail_page_mode.py,sha256=n4fgPpiEB5siPoEg0Sux4COg7ElNybjshxDoUihZwRU,786
 llama_cloud/types/failure_handling_config.py,sha256=EmAQW0qm7-JTSYFwhmIWxqkVNWym_AyAJIMEmeI9Cqc,1216
-llama_cloud/types/file.py,sha256=
+llama_cloud/types/file.py,sha256=sXdF-cdHL3k1-DPIxAjYpb-kNHzcOAV_earVoYITzUA,1765
 llama_cloud/types/file_classification.py,sha256=jKzAc_3rg0Usyf3TNr-bI5HZn9zGIj9vYH90RKoDtiY,1418
 llama_cloud/types/file_count_by_status_response.py,sha256=WuorbZvKjDs9Ql1hUiQu4gN5iCm8d6fr92KLyHpRvQU,1356
 llama_cloud/types/file_create.py,sha256=eLUC50CzXOdAR_P2mBtX_R7kGteIVbP1V3LzuP1s0Xs,1629
@@ -275,6 +278,7 @@ llama_cloud/types/paginated_list_pipeline_files_response.py,sha256=2TKR2oHSQRyLM
 llama_cloud/types/paginated_report_response.py,sha256=o79QhQi9r0HZZrhvRlA6WGjxtyPuxN0xONhwXSwxtcs,1104
 llama_cloud/types/paginated_response_agent_data.py,sha256=u6Y-Cq9qjGF5tskMOQChUNqyI91Tk-uQ6vQdi69cs80,1159
 llama_cloud/types/paginated_response_aggregate_group.py,sha256=1ajZLZJLU6-GuQ_PPsEVRFZ6bm9he807F_F_DmB2HlQ,1179
+llama_cloud/types/paginated_response_classify_job.py,sha256=ABpHn-ryRS8erj02ncxshAFe2Enw5JvSZqqbZuy0nWA,1167
 llama_cloud/types/paginated_response_quota_configuration.py,sha256=S-miK621O7V6hBB05xcFBKCwa-gBK17iTHh29Saebz8,1123
 llama_cloud/types/parse_job_config.py,sha256=8Rm4jkXIRIwX_muj5YmpMNxXEM4_4mE2RKtuMlboOh8,6975
 llama_cloud/types/parse_job_config_priority.py,sha256=__-gVv1GzktVCYZVyl6zeDt0pAZwYl-mxM0xkIHPEro,800
@@ -298,13 +302,13 @@ llama_cloud/types/pipeline_create.py,sha256=PKchM5cxkidXVFv2qON0uVh5lv8aqsy5OrZv
 llama_cloud/types/pipeline_create_embedding_config.py,sha256=PQqmVBFUyZXYKKBmVQF2zPsGp1L6rje6g3RtXEcdfc8,2811
 llama_cloud/types/pipeline_create_transform_config.py,sha256=HP6tzLsw_pomK1Ye2PYCS_XDZK_TMgg22mz17_zYKFg,303
 llama_cloud/types/pipeline_data_source.py,sha256=iKB2NgpWQTl_rNDCvnXjNyd0gzohqwfCnupzWYT_CTE,2465
-llama_cloud/types/pipeline_data_source_component.py,sha256=
+llama_cloud/types/pipeline_data_source_component.py,sha256=UNKwvGq0_06wdPr2th-xa306lo5BPJzhoHEZ6RfdoPQ,1071
 llama_cloud/types/pipeline_data_source_create.py,sha256=wMsymqB-YGyf3jdQr-N5ODVG6v0w68EMxGBNdQXeJe0,1178
 llama_cloud/types/pipeline_data_source_custom_metadata_value.py,sha256=8n3r60sxMx4_udW0yzJZxzyWeK6L3cc2-jLGZFW4EDs,217
 llama_cloud/types/pipeline_data_source_status.py,sha256=BD4xoftwp9lWC8EjJTnf3boIG_AyzjLPuP4qJxGhmcc,1039
 llama_cloud/types/pipeline_deployment.py,sha256=eVBrz032aPb2cqtIIVYT5MTHQvBNm89XazoNrRWVugo,1356
 llama_cloud/types/pipeline_embedding_config.py,sha256=7NJzlabQLFUFsvj7fye-oKLPasaXCWJBm-XuLxy-xmQ,3112
-llama_cloud/types/pipeline_file.py,sha256=
+llama_cloud/types/pipeline_file.py,sha256=zoE1A4pdD7S4cgDtR_aVToQ08JDt_siUJTYsdayG8s4,2510
 llama_cloud/types/pipeline_file_config_hash_value.py,sha256=4lvLnDpzNAHdiMkGJTTNDTu3p3H7Nxw5MR1Mzte7-_M,201
 llama_cloud/types/pipeline_file_create.py,sha256=yoMIzWED0ktKerE48kgzInBa3d0aNGO5JjTtDTDAn4A,1310
 llama_cloud/types/pipeline_file_create_custom_metadata_value.py,sha256=olVj5yhQFx1QqWO1Wv9d6AtL-YyYO9_OYtOfcD2ZeGY,217
@@ -380,7 +384,7 @@ llama_cloud/types/update_user_response.py,sha256=NfZSKY3nYSQPrG5pBSFQKbeYXEV1FV5
 llama_cloud/types/usage_and_plan.py,sha256=bclc7TE7CTBu7RLiTHG426dziyj--I8m5NVu86I2AV4,1065
 llama_cloud/types/usage_metric_response.py,sha256=ukvtNZLeLacv-5F0-GQ5wTBZOPUPEjAeurgYPc4s7nA,1047
 llama_cloud/types/usage_response.py,sha256=o0u15PGNQmOOie4kJFfc4Rw0jKGLckBJdH0NCAfT8_k,1499
-llama_cloud/types/usage_response_active_alerts_item.py,sha256=
+llama_cloud/types/usage_response_active_alerts_item.py,sha256=ti8H5yZ8X3yAmTZwM-dIWNnraTaUN3aBuhdGNM-WYcc,1708
 llama_cloud/types/user_job_record.py,sha256=mJHdokJsemXJOwM2l7fsW3X0SlwSNcy7yHbcXZHh3I4,1098
 llama_cloud/types/user_organization.py,sha256=yKewpOrMcB-CbujGNTjkX6QiWYr5HVsRIFQ-WX8kp2I,1729
 llama_cloud/types/user_organization_create.py,sha256=Zj57s9xuYVnLW2p8i4j2QORL-G1y7Ab3avXE1baERQY,1189
@@ -394,7 +398,7 @@ llama_cloud/types/vertex_embedding_mode.py,sha256=yY23FjuWU_DkXjBb3JoKV4SCMqel2B
 llama_cloud/types/vertex_text_embedding.py,sha256=-C4fNCYfFl36ATdBMGFVPpiHIKxjk0KB1ERA2Ec20aU,1932
 llama_cloud/types/webhook_configuration.py,sha256=_Xm15whrWoKNBuCoO5y_NunA-ByhCAYK87LnC4W-Pzg,1350
 llama_cloud/types/webhook_configuration_webhook_events_item.py,sha256=OL3moFO_6hsKZYSBQBsSHmWA0NgLcLJgBPZfABwT60c,2544
-llama_cloud-0.1.
-llama_cloud-0.1.
-llama_cloud-0.1.
-llama_cloud-0.1.
+llama_cloud-0.1.39.dist-info/LICENSE,sha256=_iNqtPcw1Ue7dZKwOwgPtbegMUkWVy15hC7bffAdNmY,1067
+llama_cloud-0.1.39.dist-info/METADATA,sha256=WNoZded1_WK8ATxqn2o6TaLGwsn2uijfjy07RnZd8bI,1143
+llama_cloud-0.1.39.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+llama_cloud-0.1.39.dist-info/RECORD,,

{llama_cloud-0.1.37.dist-info → llama_cloud-0.1.39.dist-info}/LICENSE
File without changes