llama-cloud 0.0.14__py3-none-any.whl → 0.0.15__py3-none-any.whl
This diff shows the contents of the two publicly released package versions as they appear in their public registry, and is provided for informational purposes only.
Potentially problematic release: this version of llama-cloud might be problematic.
- llama_cloud/resources/pipelines/client.py +170 -0
- llama_cloud/types/pipeline_data_source.py +7 -0
- llama_cloud/types/pipeline_data_source_create.py +3 -0
- {llama_cloud-0.0.14.dist-info → llama_cloud-0.0.15.dist-info}/METADATA +1 -1
- {llama_cloud-0.0.14.dist-info → llama_cloud-0.0.15.dist-info}/RECORD +7 -7
- {llama_cloud-0.0.14.dist-info → llama_cloud-0.0.15.dist-info}/LICENSE +0 -0
- {llama_cloud-0.0.14.dist-info → llama_cloud-0.0.15.dist-info}/WHEEL +0 -0
@@ -1040,6 +1040,52 @@ class PipelinesClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
+    def update_pipeline_data_source(
+        self, pipeline_id: str, data_source_id: str, *, sync_interval: typing.Optional[float] = OMIT
+    ) -> PipelineDataSource:
+        """
+        Update the configuration of a data source in a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - data_source_id: str.
+
+            - sync_interval: typing.Optional[float]. The interval at which the data source should be synced.
+        ---
+        from llama_cloud.client import LlamaCloud
+
+        client = LlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        client.pipelines.update_pipeline_data_source(
+            pipeline_id="string",
+            data_source_id="string",
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {}
+        if sync_interval is not OMIT:
+            _request["sync_interval"] = sync_interval
+        _response = self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/",
+                f"api/v1/pipelines/{pipeline_id}/data-sources/{data_source_id}",
+            ),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(PipelineDataSource, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
     def delete_pipeline_data_source(self, pipeline_id: str, data_source_id: str) -> None:
         """
         Delete a data source from a pipeline.
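The new PipelinesClient.update_pipeline_data_source method added to llama_cloud/resources/pipelines/client.py issues a PUT to api/v1/pipelines/{pipeline_id}/data-sources/{data_source_id}, and sync_interval is only included in the body when it is passed explicitly (otherwise it stays OMIT and the key is left out). A minimal usage sketch based on the docstring above; the token, the IDs, and the 3600.0 interval are placeholder values:

    from llama_cloud.client import LlamaCloud

    # Placeholder credentials and IDs; substitute real values.
    client = LlamaCloud(token="YOUR_TOKEN")

    # Request a sync interval of 3600.0. Seconds is an assumption; the field is
    # only documented as "the interval at which the data source should be synced".
    data_source = client.pipelines.update_pipeline_data_source(
        pipeline_id="pipeline-id",
        data_source_id="data-source-id",
        sync_interval=3600.0,
    )
    print(data_source.sync_interval, data_source.last_synced_at)

Calling the method without sync_interval sends an empty JSON body, which presumably leaves the existing schedule untouched.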
@@ -1116,6 +1162,44 @@ class PipelinesClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
+    def get_pipeline_data_source_status(self, pipeline_id: str, data_source_id: str) -> ManagedIngestionStatusResponse:
+        """
+        Get the status of a data source for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - data_source_id: str.
+        ---
+        from llama_cloud.client import LlamaCloud
+
+        client = LlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        client.pipelines.get_pipeline_data_source_status(
+            pipeline_id="string",
+            data_source_id="string",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/",
+                f"api/v1/pipelines/{pipeline_id}/data-sources/{data_source_id}/status",
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(ManagedIngestionStatusResponse, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
     def run_search(
         self,
         pipeline_id: str,
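get_pipeline_data_source_status performs a GET against the .../status endpoint and parses the body into ManagedIngestionStatusResponse. A hypothetical polling sketch; the status attribute and the "SUCCESS"/"ERROR" values are assumptions, since the response model's fields are not shown in this diff:

    import time

    from llama_cloud.client import LlamaCloud

    client = LlamaCloud(token="YOUR_TOKEN")  # placeholder token

    # Poll until ingestion for this data source settles. getattr() is used because
    # the `status` field name and its terminal values are assumed here; check the
    # ManagedIngestionStatusResponse model for the actual attributes.
    while True:
        resp = client.pipelines.get_pipeline_data_source_status(
            pipeline_id="pipeline-id",        # placeholder IDs
            data_source_id="data-source-id",
        )
        print(resp)
        if getattr(resp, "status", None) in ("SUCCESS", "ERROR"):
            break
        time.sleep(10)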
@@ -2652,6 +2736,52 @@ class AsyncPipelinesClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
+    async def update_pipeline_data_source(
+        self, pipeline_id: str, data_source_id: str, *, sync_interval: typing.Optional[float] = OMIT
+    ) -> PipelineDataSource:
+        """
+        Update the configuration of a data source in a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - data_source_id: str.
+
+            - sync_interval: typing.Optional[float]. The interval at which the data source should be synced.
+        ---
+        from llama_cloud.client import AsyncLlamaCloud
+
+        client = AsyncLlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        await client.pipelines.update_pipeline_data_source(
+            pipeline_id="string",
+            data_source_id="string",
+        )
+        """
+        _request: typing.Dict[str, typing.Any] = {}
+        if sync_interval is not OMIT:
+            _request["sync_interval"] = sync_interval
+        _response = await self._client_wrapper.httpx_client.request(
+            "PUT",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/",
+                f"api/v1/pipelines/{pipeline_id}/data-sources/{data_source_id}",
+            ),
+            json=jsonable_encoder(_request),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(PipelineDataSource, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
     async def delete_pipeline_data_source(self, pipeline_id: str, data_source_id: str) -> None:
         """
         Delete a data source from a pipeline.
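The AsyncPipelinesClient variant is identical except that the HTTP call is awaited. A sketch of driving it from a script with asyncio; the token, IDs, and the 1800.0 interval are placeholders:

    import asyncio

    from llama_cloud.client import AsyncLlamaCloud

    async def main() -> None:
        client = AsyncLlamaCloud(token="YOUR_TOKEN")  # placeholder token
        # Same PUT as the sync client, awaited; 1800.0 is an arbitrary example interval.
        data_source = await client.pipelines.update_pipeline_data_source(
            pipeline_id="pipeline-id",
            data_source_id="data-source-id",
            sync_interval=1800.0,
        )
        print(data_source)

    asyncio.run(main())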
@@ -2728,6 +2858,46 @@ class AsyncPipelinesClient:
         raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
+    async def get_pipeline_data_source_status(
+        self, pipeline_id: str, data_source_id: str
+    ) -> ManagedIngestionStatusResponse:
+        """
+        Get the status of a data source for a pipeline.
+
+        Parameters:
+            - pipeline_id: str.
+
+            - data_source_id: str.
+        ---
+        from llama_cloud.client import AsyncLlamaCloud
+
+        client = AsyncLlamaCloud(
+            token="YOUR_TOKEN",
+        )
+        await client.pipelines.get_pipeline_data_source_status(
+            pipeline_id="string",
+            data_source_id="string",
+        )
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            "GET",
+            urllib.parse.urljoin(
+                f"{self._client_wrapper.get_base_url()}/",
+                f"api/v1/pipelines/{pipeline_id}/data-sources/{data_source_id}/status",
+            ),
+            headers=self._client_wrapper.get_headers(),
+            timeout=60,
+        )
+        if 200 <= _response.status_code < 300:
+            return pydantic.parse_obj_as(ManagedIngestionStatusResponse, _response.json())  # type: ignore
+        if _response.status_code == 422:
+            raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json()))  # type: ignore
+        try:
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
     async def run_search(
         self,
         pipeline_id: str,
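One reason to reach for the async status method is checking several data sources concurrently. A sketch using asyncio.gather, with made-up IDs and a placeholder token:

    import asyncio

    from llama_cloud.client import AsyncLlamaCloud

    async def main() -> None:
        client = AsyncLlamaCloud(token="YOUR_TOKEN")   # placeholder token
        data_source_ids = ["ds-1", "ds-2", "ds-3"]     # made-up IDs
        # Fire the status requests concurrently and print each response.
        statuses = await asyncio.gather(
            *(
                client.pipelines.get_pipeline_data_source_status(
                    pipeline_id="pipeline-id", data_source_id=ds_id
                )
                for ds_id in data_source_ids
            )
        )
        for ds_id, status in zip(data_source_ids, statuses):
            print(ds_id, status)

    asyncio.run(main())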
@@ -34,6 +34,13 @@ class PipelineDataSource(pydantic.BaseModel):
     project_id: str
     data_source_id: str = pydantic.Field(description="The ID of the data source.")
     pipeline_id: str = pydantic.Field(description="The ID of the pipeline.")
+    last_synced_at: dt.datetime = pydantic.Field(description="The last time the data source was automatically synced.")
+    sync_interval: typing.Optional[float] = pydantic.Field(
+        description="The interval at which the data source should be synced."
+    )
+    sync_schedule_set_by: typing.Optional[str] = pydantic.Field(
+        description="The id of the user who set the sync schedule."
+    )
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
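These three fields surface the sync schedule on the PipelineDataSource objects returned by the client. A small sketch of reading them from an update_pipeline_data_source result; the token and IDs are placeholders, and the serialization defaults come from the model's json() override shown above:

    from llama_cloud.client import LlamaCloud

    client = LlamaCloud(token="YOUR_TOKEN")  # placeholder token

    ds = client.pipelines.update_pipeline_data_source(
        pipeline_id="pipeline-id",           # placeholder IDs
        data_source_id="data-source-id",
        sync_interval=3600.0,
    )

    # New schedule-related fields added in 0.0.15.
    print("last synced:", ds.last_synced_at)            # dt.datetime
    print("interval:", ds.sync_interval)                # Optional[float]
    print("schedule set by:", ds.sync_schedule_set_by)  # Optional[str], a user id

    # json() defaults to by_alias=True / exclude_unset=True per the override above.
    print(ds.json())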
@@ -20,6 +20,9 @@ class PipelineDataSourceCreate(pydantic.BaseModel):
     """
 
     data_source_id: str = pydantic.Field(description="The ID of the data source.")
+    sync_interval: typing.Optional[float] = pydantic.Field(
+        description="The interval at which the data source should be synced."
+    )
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
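PipelineDataSourceCreate now accepts the same optional sync_interval when a data source is attached to a pipeline. A sketch that only builds and serializes the payload; the import path mirrors the module listed in RECORD, and the pipelines endpoint that consumes this model is not shown in this diff:

    from llama_cloud.types.pipeline_data_source_create import PipelineDataSourceCreate

    # Build the create payload with the new optional sync_interval field.
    payload = PipelineDataSourceCreate(
        data_source_id="data-source-id",  # placeholder ID
        sync_interval=3600.0,             # assumed to be seconds; only described as "the interval"
    )

    # exclude_unset=True (the model's json() default) keeps omitted fields out of the body.
    print(payload.json())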
@@ -41,7 +41,7 @@ llama_cloud/resources/organizations/client.py,sha256=akn_3sytJW_VhuLVBbP0TKiKKbB
 llama_cloud/resources/parsing/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 llama_cloud/resources/parsing/client.py,sha256=epdS59BEsxx9nQryTV_Eemd3RAhESyMN2K4uP2gSpPQ,40700
 llama_cloud/resources/pipelines/__init__.py,sha256=Ww7n75XUkq-aqy7WuxhO9rRbBmi_VsYMpkZpDyw8oYs,1147
-llama_cloud/resources/pipelines/client.py,sha256=
+llama_cloud/resources/pipelines/client.py,sha256=Ek_eSin0f32Fucf7BhfAqKT8xgJWagLt_np9oYZdcqI,136672
 llama_cloud/resources/pipelines/types/__init__.py,sha256=PFl5hsq0GHQRwZLR-L7igQ5NBO8GH_rJPOcRR_16ODk,1275
 llama_cloud/resources/pipelines/types/pipeline_file_update_custom_metadata_value.py,sha256=trI48WLxPcAqV9207Q6-3cj1nl4EGlZpw7En56ZsPgg,217
 llama_cloud/resources/pipelines/types/pipeline_update_embedding_config.py,sha256=0XHOqSS-yiSREOi8-kE9yjgKNzxqzAcxDhHDHfDHEkk,2494
@@ -177,10 +177,10 @@ llama_cloud/types/pipeline.py,sha256=u1EoHhP9UQab3y0rolNiLEd51JqzxfAZeMyRDPencE8
 llama_cloud/types/pipeline_create.py,sha256=Dv9wh5Uu4MoITP6bunhaem1ToRcs3Fw2gr3fw7HO7Fk,3217
 llama_cloud/types/pipeline_create_embedding_config.py,sha256=Lu03mWVS7XrqvhSsjV4H2OE69Qf32kw7k67ZoV-P5Kg,2440
 llama_cloud/types/pipeline_create_transform_config.py,sha256=CiMil0NrwvxR34CAzrSWw9Uo0117tz409sptH1k_r48,854
-llama_cloud/types/pipeline_data_source.py,sha256=
+llama_cloud/types/pipeline_data_source.py,sha256=uiTu6BkXgizhkuqr6GHiS8ZBhtnLcwcitMFkwS6woaE,2465
 llama_cloud/types/pipeline_data_source_component.py,sha256=Pk_K0Gv7xSWe5BKCdxz82EFd6AQDvZGN-6t3zg9h8NY,265
 llama_cloud/types/pipeline_data_source_component_one.py,sha256=W9ntkcrg6bNOJgSe1GCUX8AjnY0RDwBYo9QQiFWGZio,951
-llama_cloud/types/pipeline_data_source_create.py,sha256=
+llama_cloud/types/pipeline_data_source_create.py,sha256=0QPQNT6dvLaO5bZGX4QJWo5-2T44dQRjs2R5HwDaFa4,1280
 llama_cloud/types/pipeline_data_source_custom_metadata_value.py,sha256=8n3r60sxMx4_udW0yzJZxzyWeK6L3cc2-jLGZFW4EDs,217
 llama_cloud/types/pipeline_deployment.py,sha256=3sWAIdeov3CYFZMCAWwCR46ShHA6XAzSqmc18qryHzM,1669
 llama_cloud/types/pipeline_embedding_config.py,sha256=ucK2AZdIgOYCQaR87Wt2H3Jq4OX25iG5JFovZeo4UQo,2362
@@ -221,7 +221,7 @@ llama_cloud/types/user_organization_create.py,sha256=YESlfcI64710OFdQzgGD4a7aItg
 llama_cloud/types/user_organization_delete.py,sha256=Z8RSRXc0AGAuGxv6eQPC2S1XIdRfNCXBggfEefgPseM,1209
 llama_cloud/types/validation_error.py,sha256=yZDLtjUHDY5w82Ra6CW0H9sLAr18R0RY1UNgJKR72DQ,1084
 llama_cloud/types/validation_error_loc_item.py,sha256=LAtjCHIllWRBFXvAZ5QZpp7CPXjdtN9EB7HrLVo6EP0,128
-llama_cloud-0.0.
-llama_cloud-0.0.
-llama_cloud-0.0.
-llama_cloud-0.0.
+llama_cloud-0.0.15.dist-info/LICENSE,sha256=_iNqtPcw1Ue7dZKwOwgPtbegMUkWVy15hC7bffAdNmY,1067
+llama_cloud-0.0.15.dist-info/METADATA,sha256=t8rU9koSxTTowXcLKluWOWVFVIkhKBJXvCU-9Kggfr8,751
+llama_cloud-0.0.15.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+llama_cloud-0.0.15.dist-info/RECORD,,