anyscale 0.26.19__py3-none-any.whl → 0.26.21__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- anyscale/_private/docgen/models.md +1 -1
- anyscale/client/README.md +6 -10
- anyscale/client/openapi_client/__init__.py +3 -3
- anyscale/client/openapi_client/api/default_api.py +238 -671
- anyscale/client/openapi_client/models/__init__.py +3 -3
- anyscale/client/openapi_client/models/decorated_production_job_state_transition.py +2 -2
- anyscale/client/openapi_client/models/{organizationpublicidentifier_response.py → job_queue_sort_directive.py} +49 -22
- anyscale/client/openapi_client/models/{organization_response.py → job_queue_sort_field.py} +20 -34
- anyscale/client/openapi_client/models/job_queues_query.py +31 -3
- anyscale/client/openapi_client/models/production_job_state_transition.py +2 -2
- anyscale/client/openapi_client/models/{organization_public_identifier.py → update_job_queue_request.py} +51 -22
- anyscale/commands/cloud_commands.py +15 -4
- anyscale/commands/command_examples.py +58 -0
- anyscale/commands/job_commands.py +2 -2
- anyscale/commands/job_queue_commands.py +172 -0
- anyscale/controllers/cloud_controller.py +358 -49
- anyscale/controllers/job_controller.py +215 -3
- anyscale/scripts.py +3 -0
- anyscale/sdk/anyscale_client/models/production_job_state_transition.py +2 -2
- anyscale/util.py +3 -1
- anyscale/utils/connect_helpers.py +34 -0
- anyscale/utils/gcp_utils.py +20 -4
- anyscale/version.py +1 -1
- anyscale/workspace/_private/workspace_sdk.py +19 -6
- {anyscale-0.26.19.dist-info → anyscale-0.26.21.dist-info}/METADATA +1 -1
- {anyscale-0.26.19.dist-info → anyscale-0.26.21.dist-info}/RECORD +31 -30
- {anyscale-0.26.19.dist-info → anyscale-0.26.21.dist-info}/LICENSE +0 -0
- {anyscale-0.26.19.dist-info → anyscale-0.26.21.dist-info}/NOTICE +0 -0
- {anyscale-0.26.19.dist-info → anyscale-0.26.21.dist-info}/WHEEL +0 -0
- {anyscale-0.26.19.dist-info → anyscale-0.26.21.dist-info}/entry_points.txt +0 -0
- {anyscale-0.26.19.dist-info → anyscale-0.26.21.dist-info}/top_level.txt +0 -0
anyscale/controllers/job_controller.py
CHANGED
@@ -1,9 +1,13 @@
+from __future__ import annotations
+
 import asyncio
+from collections import defaultdict
+from enum import Enum
 import os
 import random
 import string
 import time
-from typing import Any, cast, Dict, List, Optional
+from typing import Any, Callable, cast, Dict, Iterable, List, Optional
 
 import click
 import tabulate
@@ -23,10 +27,21 @@ from anyscale.client.openapi_client.models.create_internal_production_job import
 from anyscale.client.openapi_client.models.create_job_queue_config import (
     CreateJobQueueConfig,
 )
+from anyscale.client.openapi_client.models.decorated_job_queue import DecoratedJobQueue
 from anyscale.client.openapi_client.models.decorated_production_job import (
     DecoratedProductionJob,
 )
+from anyscale.client.openapi_client.models.decoratedjobqueue_response import (
+    DecoratedjobqueueResponse,
+)
 from anyscale.client.openapi_client.models.ha_job_states import HaJobStates
+from anyscale.client.openapi_client.models.job_queue_sort_directive import (
+    JobQueueSortDirective,
+)
+from anyscale.client.openapi_client.models.job_queues_query import JobQueuesQuery
+from anyscale.client.openapi_client.models.update_job_queue_request import (
+    UpdateJobQueueRequest,
+)
 from anyscale.controllers.base_controller import BaseController
 from anyscale.models.job_model import JobConfig
 from anyscale.project_utils import infer_project_id
@@ -45,7 +60,7 @@ from anyscale.util import (
     populate_unspecified_cluster_configs_from_current_workspace,
     validate_job_config_dict,
 )
-from anyscale.utils.connect_helpers import search_entities
+from anyscale.utils.connect_helpers import paginate, search_entities
 from anyscale.utils.runtime_env import override_runtime_env_config
 from anyscale.utils.workload_types import Workload
 
@@ -294,7 +309,7 @@ class JobController(BaseController):
         project_id: Optional[str],
         include_archived: bool,
         max_items: int,
-        states: List[
+        states: List[HaJobStates],
     ) -> None:
         """
         This function will list jobs.
@@ -338,6 +353,7 @@ class JobController(BaseController):
             )
         else:
             creator_id = None
+
         resp = self.api_client.list_decorated_jobs_api_v2_decorated_ha_jobs_get(
             project_id=project_id,
             name=name,
@@ -647,3 +663,199 @@ class JobController(BaseController):
             ),
         )
         return job_runs
+
+    def update_job_queue(
+        self,
+        job_queue_id: str,
+        job_queue_name: str,
+        max_concurrency: Optional[int] = None,
+        idle_timeout_s: Optional[int] = None,
+    ):
+        job_queue: DecoratedJobQueue = _resolve_object(
+            fetch_by_id=cast(
+                Callable[[str], DecoratedjobqueueResponse],
+                self.api_client.get_job_queue_api_v2_job_queues_job_queue_id_get,
+            ),
+            fetch_by_id_param=job_queue_id,
+            fetch_by_name=cast(
+                Callable[[str], DecoratedjobqueueResponse],
+                self.api_client.list_job_queues_api_v2_job_queues_post,
+            ),
+            fetch_by_name_query={
+                "job_queues_query": {"name": {"equals": job_queue_name,}}
+            },
+            object_type_description="job queue",
+        )
+
+        queue: DecoratedJobQueue = self.api_client.update_job_queue_api_v2_job_queues_job_queue_id_put(
+            job_queue_id=job_queue.id,
+            update_job_queue_request=UpdateJobQueueRequest(
+                max_concurrency=max_concurrency, idle_timeout_sec=idle_timeout_s,
+            ),
+        ).result
+
+        _print_job_queue_vertical(queue, JobQueueView.ALL)
+
+    def get_job_queue(self, job_queue_id: str):
+        queue: DecoratedJobQueue = self.api_client.get_job_queue_api_v2_job_queues_job_queue_id_get(
+            job_queue_id=job_queue_id
+        ).result
+
+        _print_job_queue_vertical(queue, JobQueueView.ALL)
+
+    def list_job_queues(
+        self,
+        include_all_users: bool,
+        view: JobQueueView,
+        page_size: int,
+        max_items: Optional[int],
+        sorting_directives: List[JobQueueSortDirective],
+        interactive: bool,
+    ):
+        creator_id = (
+            None
+            if include_all_users
+            else self.api_client.get_user_info_api_v2_userinfo_get().result.id
+        )
+
+        def build_query(paging_token: Optional[str], count: int) -> Dict:
+            return {
+                "job_queues_query": JobQueuesQuery(
+                    creator_id=creator_id,
+                    paging=PageQuery(paging_token=paging_token, count=count),
+                    sorting_directives=sorting_directives,
+                )
+            }
+
+        for batch in paginate(
+            search_function=self.api_client.list_job_queues_api_v2_job_queues_post,
+            query_builder=build_query,
+            interactive=interactive,
+            page_size=page_size,
+            max_items=max_items,
+        ):
+            _render_job_queues(batch, view)
+
+
+def _render_jobs(jobs):
+    jobs_table = [
+        [
+            job.name,
+            job.id,
+            job.project.name,
+            job.last_job_run.cluster.name
+            if job.last_job_run and job.last_job_run.cluster
+            else None,
+            job.state.current_state,
+            job.creator.email,
+            job.config.entrypoint
+            if len(job.config.entrypoint) < 50
+            else job.config.entrypoint[:50] + " ...",
+        ]
+        for job in jobs
+    ]
+
+    table = tabulate.tabulate(
+        jobs_table,
+        headers=[
+            "NAME",
+            "ID",
+            "PROJECT NAME",
+            "CLUSTER NAME",
+            "CURRENT STATE",
+            "CREATOR",
+            "ENTRYPOINT",
+        ],
+        tablefmt="plain",
+    )
+    click.echo(f"{table}")
+
+
+class JobQueueView(Enum):
+    ALL = DecoratedJobQueue.attribute_map.keys()
+    STATS = [
+        "id",
+        "name",
+        "total_jobs",
+        "active_jobs",
+        "successful_jobs",
+        "failed_jobs",
+    ]
+    DEFAULT = [
+        "id",
+        "name",
+        "cluster_id",
+        "creator_id",
+        "max_concurrency",
+        "idle_timeout_sec",
+        "current_cluster_state",
+        "created_at",
+    ]
+
+
+def _format_job_queue(queue: DecoratedJobQueue, view: JobQueueView) -> List[str]:
+    formatters: Dict[str, Callable[[Any], Any]] = defaultdict(lambda: (lambda v: v))
+    formatters["created_at"] = lambda k: k.strftime("%Y-%m-%d %H:%M:%S")
+
+    return [formatters[field](getattr(queue, field, "")) or "" for field in view.value]
+
+
+def _render_job_queues(queues: Iterable[DecoratedJobQueue], view: JobQueueView):
+    if not queues:
+        click.echo("No job queues found!")
+        return
+    table = tabulate.tabulate(
+        [
+            _format_job_queue(queue, view)
+            for queue in cast(Iterable[DecoratedJobQueue], queues)
+        ],
+        headers=[field.replace("_", " ").upper() for field in view.value],
+        tablefmt="plain",
+        maxcolwidths=30,  # type: ignore
+        numalign="center",
+        stralign="center",
+    )
+    click.echo(table)
+
+
+def _print_job_queue_vertical(queue: DecoratedJobQueue, job_queue_view: JobQueueView):
+    """
+    Print single job queue with headers as a vertical table
+    """
+    for header, value in zip(
+        [field.replace("_", " ").upper() for field in job_queue_view.value],
+        _format_job_queue(queue, job_queue_view),
+    ):
+        print(f"{header:<{30}}: {value}")
+
+
+def _resolve_object(
+    fetch_by_id: Optional[Callable[[str], object]],
+    fetch_by_id_param: Optional[str],
+    fetch_by_name,
+    fetch_by_name_query,
+    object_type_description: str,
+) -> Any:
+    """Given job_id or job_name, retrieve decorated ha job spec"""
+    if fetch_by_id_param is None and fetch_by_name_query is None:
+        raise click.ClickException(
+            "Either `--id` or `--name` must be passed in for object."
+        )
+    if fetch_by_id_param:
+        try:
+            return fetch_by_id(fetch_by_id_param).result  # type: ignore
+        except Exception as e:  # noqa: BLE001
+            raise click.ClickException(
+                f"Could not fetch {object_type_description} by id: {e}"
+            )
+
+    object_list_resp: List[Any] = fetch_by_name(**fetch_by_name_query).results
+    if len(object_list_resp) == 0:
+        raise click.ClickException(
+            f"No {object_type_description} found with the provided name"
+        )
+    if len(object_list_resp) > 1:
+        raise click.ClickException(
+            f"Multiple {object_type_description}s found with the provided name"
+        )
+    return object_list_resp[0]
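The rendering helpers added above share one pattern: a per-field formatter table built on `defaultdict`, applied against an enum that lists the visible columns. Below is a minimal, self-contained sketch of that pattern using a stand-in dataclass instead of `DecoratedJobQueue`; the `FakeQueue` type, `View` enum, and field values are illustrative only, not part of the package.

```python
from collections import defaultdict
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import Any, Callable, Dict, List

import tabulate


@dataclass
class FakeQueue:  # stand-in for DecoratedJobQueue, for illustration only
    id: str
    name: str
    max_concurrency: int
    created_at: datetime


class View(Enum):
    DEFAULT = ["id", "name", "max_concurrency", "created_at"]


def format_row(queue: FakeQueue, view: View) -> List[str]:
    # Fields without a dedicated formatter pass through unchanged.
    formatters: Dict[str, Callable[[Any], Any]] = defaultdict(lambda: (lambda v: v))
    formatters["created_at"] = lambda dt: dt.strftime("%Y-%m-%d %H:%M:%S")
    return [str(formatters[f](getattr(queue, f, "")) or "") for f in view.value]


queues = [FakeQueue("jq_123", "nightly", 5, datetime(2025, 1, 1, 12, 0))]
print(
    tabulate.tabulate(
        [format_row(q, View.DEFAULT) for q in queues],
        headers=[f.replace("_", " ").upper() for f in View.DEFAULT.value],
        tablefmt="plain",
    )
)
```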
anyscale/scripts.py
CHANGED
@@ -22,6 +22,7 @@ from anyscale.commands.experimental_integrations_commands import (
 )
 from anyscale.commands.image_commands import image_cli
 from anyscale.commands.job_commands import job_cli
+from anyscale.commands.job_queue_commands import job_queue_cli
 from anyscale.commands.list_commands import list_cli
 from anyscale.commands.llm.group import llm_cli
 from anyscale.commands.login_commands import anyscale_login, anyscale_logout
@@ -120,6 +121,7 @@ cli.add_command(version_cli)
 cli.add_command(list_cli)
 cli.add_command(cluster_env_cli)
 cli.add_command(job_cli)
+# cli.add_command(job_queue_cli) # TODO will be enabled later
 cli.add_command(schedule_cli)
 cli.add_command(service_cli)
 cli.add_command(cluster_cli)
@@ -153,6 +155,7 @@ ALIASES = {
     "h": anyscale_help,
     "schedules": schedule_cli,
     "jobs": job_cli,
+    "jq": job_queue_cli,
     "services": service_cli,
     "cluster-compute": compute_config_cli,
     "images": image_cli,
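The new `anyscale/commands/job_queue_commands.py` module itself is not shown in this diff, so the following is only a generic sketch of how a Click command group is defined and registered the way `job_queue_cli` is wired above (registration currently commented out, while the `jq` alias already points at the group). The group and subcommand names here are placeholders, not the actual Anyscale commands.

```python
import click


@click.group(name="job-queue", help="Placeholder group; names are illustrative.")
def job_queue_cli() -> None:
    pass


@job_queue_cli.command(name="list", help="Placeholder subcommand.")
@click.option("--include-all-users", is_flag=True, default=False)
def list_job_queues(include_all_users: bool) -> None:
    click.echo(f"include_all_users={include_all_users}")


@click.group()
def cli() -> None:
    pass


# Mirrors the (currently commented-out) registration in scripts.py.
cli.add_command(job_queue_cli)

if __name__ == "__main__":
    cli()
```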
@@ -108,7 +108,7 @@ class ProductionJobStateTransition(object):
     def state_transitioned_at(self):
         """Gets the state_transitioned_at of this ProductionJobStateTransition. # noqa: E501
 
-        The last time the state of this job was updated
+        The last time the state of this job was updated # noqa: E501
 
         :return: The state_transitioned_at of this ProductionJobStateTransition. # noqa: E501
         :rtype: datetime
@@ -119,7 +119,7 @@ class ProductionJobStateTransition(object):
     def state_transitioned_at(self, state_transitioned_at):
         """Sets the state_transitioned_at of this ProductionJobStateTransition.
 
-        The last time the state of this job was updated
+        The last time the state of this job was updated # noqa: E501
 
         :param state_transitioned_at: The state_transitioned_at of this ProductionJobStateTransition. # noqa: E501
         :type: datetime
anyscale/util.py
CHANGED
@@ -987,7 +987,9 @@ def validate_job_config_dict(
     )
 
 
-def validate_list_jobs_state_filter(
+def validate_list_jobs_state_filter(
+    _, param, value
+) -> List[HaJobStates]: # noqa: ARG001
     """
     Validate the job state filter for list jobs CLI method
     """
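The new three-argument signature matches Click's option-callback protocol, where the callback receives the context, the parameter object, and the raw value. A minimal sketch of how such a validator is typically attached follows; the option name, valid states, and validation logic are illustrative, not the actual anyscale CLI wiring.

```python
from typing import List

import click

VALID_STATES = ["RUNNING", "SUCCESS", "TERMINATED"]  # illustrative values only


def validate_states(_, param, value) -> List[str]:
    # Click passes (ctx, param, value); the context is unused here.
    invalid = [s for s in value if s.upper() not in VALID_STATES]
    if invalid:
        raise click.BadParameter(f"Unknown state(s): {', '.join(invalid)}")
    return [s.upper() for s in value]


@click.command()
@click.option("--state", "states", multiple=True, callback=validate_states)
def list_jobs(states) -> None:
    click.echo(f"Filtering on states: {list(states)}")


if __name__ == "__main__":
    list_jobs()
```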
anyscale/utils/connect_helpers.py
CHANGED
@@ -1,5 +1,6 @@
 from dataclasses import dataclass
 import inspect
+import sys
 from typing import (
     Any,
     Callable,
@@ -166,3 +167,36 @@ def search_entities(
         entities = entities[:max_to_return]
 
     return entities
+
+
+def paginate(
+    search_function: Callable[..., ListResponse[T]],
+    query_builder: Callable[[Optional[str], int], Dict],
+    page_size: int,
+    max_items: Optional[int] = None,
+    interactive: bool = True,
+):
+    max_items = max_items or sys.maxsize
+    queues = []
+    has_more, token = True, None
+
+    while has_more:
+        query_kwargs = query_builder(token, min(page_size, max_items))
+        resp = search_function(**query_kwargs)
+        token = resp.metadata.next_paging_token
+        batch, has_more = resp.results, token is not None
+        queues.extend(batch)
+        yield batch
+
+        if (
+            not has_more
+            or (not interactive and len(queues) >= max_items)
+            or (
+                interactive
+                and input("Press Enter to load more, or 'q' to quit: ").strip().lower()
+                == "q"
+            )
+        ):
+            break
+
+        max_items -= page_size
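Taken on its own, `paginate` is a generator that keeps requesting pages until the API stops returning a `next_paging_token`, or until the item limit or the interactive prompt stops it. Below is a self-contained sketch of driving it with a stubbed search function; the stub response shape (`results` plus `metadata.next_paging_token`) mirrors what the generator expects, but the data and the `fake_search` helper are made up for illustration.

```python
from types import SimpleNamespace
from typing import Dict, Optional

from anyscale.utils.connect_helpers import paginate  # added in this release

PAGES = {None: (["q1", "q2"], "t1"), "t1": (["q3"], None)}  # fake paged data


def fake_search(job_queues_query: Dict) -> SimpleNamespace:
    results, next_token = PAGES[job_queues_query["paging_token"]]
    return SimpleNamespace(
        results=results,
        metadata=SimpleNamespace(next_paging_token=next_token),
    )


def build_query(paging_token: Optional[str], count: int) -> Dict:
    # Mirrors the query_builder contract: token + page size in, kwargs out.
    return {"job_queues_query": {"paging_token": paging_token, "count": count}}


# With interactive=False the generator stops once max_items is reached
# or there are no more pages, without prompting for input.
for batch in paginate(
    search_function=fake_search,
    query_builder=build_query,
    page_size=2,
    max_items=10,
    interactive=False,
):
    print(batch)
```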
anyscale/utils/gcp_utils.py
CHANGED
@@ -248,10 +248,26 @@ def get_gcp_filestore_config(
     filestore_instance_id: str,
     logger: CloudSetupLogger,
 ):
-    client = factory.filestore_v1.CloudFilestoreManagerClient()
     instance_name = "projects/{}/locations/{}/instances/{}".format(
         project_id, filestore_location, filestore_instance_id
     )
+    return get_gcp_filestore_config_from_full_name(
+        factory=factory, vpc_name=vpc_name, instance_name=instance_name, logger=logger,
+    )
+
+
+def get_gcp_filestore_config_from_full_name(
+    factory: GoogleCloudClientFactory,
+    vpc_name: str,
+    instance_name: str,
+    logger: CloudSetupLogger,
+):
+    if not re.search("projects/.+/locations/.+/instances/.+", instance_name):
+        raise ValueError(
+            "Please provide the full filestore instance name. Example: projects/<project number>/locations/<location>/instances/<instance id>"
+        )
+
+    client = factory.filestore_v1.CloudFilestoreManagerClient()
     try:
         file_store = client.get_instance(name=instance_name)
     except NotFound as e:
@@ -260,7 +276,7 @@ def get_gcp_filestore_config(
             CloudSetupError.RESOURCE_NOT_FOUND,
         )
         raise ClickException(
-            f"Could not find Filestore with id {
+            f"Could not find Filestore with id {instance_name}. Please validate that you're using the correct GCP project and that the resource values are correct. Error details: {e}"
         )
     root_dir = file_store.file_shares[0].name
     for v in file_store.networks:
@@ -271,7 +287,7 @@ def get_gcp_filestore_config(
             break
     else:
         logger.error(
-            f"Filestore {
+            f"Filestore {instance_name} is not connected to {vpc_name}, but to {[v.network for v in file_store.networks]}. "
             f"This cannot be edited on an existing Filestore instance. Please recreate the filestore and connect it to {vpc_name}."
         )
         logger.log_resource_error(
@@ -279,7 +295,7 @@ def get_gcp_filestore_config(
             CloudSetupError.FILESTORE_NOT_CONNECTED_TO_VPC,
         )
         raise ClickException(
-            f"Filestore {
+            f"Filestore {instance_name} is not connected to {vpc_name}."
         )
     return GCPFileStoreConfig(
         instance_name=instance_name, root_dir=root_dir, mount_target_ip=mount_target_ip,
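The refactor splits Filestore resolution into two steps: the original entry point still builds the fully qualified resource name from its parts, while the new `get_gcp_filestore_config_from_full_name` variant validates and consumes an already-qualified name. A small standalone sketch of just that name construction and the regex guard, with placeholder identifiers:

```python
import re

# Placeholder identifiers; real values come from the cloud setup flow.
project_id = "my-project"
filestore_location = "us-central1-a"
filestore_instance_id = "my-filestore"

# Same format string used by get_gcp_filestore_config to build the full name.
instance_name = "projects/{}/locations/{}/instances/{}".format(
    project_id, filestore_location, filestore_instance_id
)

# Same guard applied by get_gcp_filestore_config_from_full_name before any API call.
if not re.search("projects/.+/locations/.+/instances/.+", instance_name):
    raise ValueError(
        "Please provide the full filestore instance name. "
        "Example: projects/<project number>/locations/<location>/instances/<instance id>"
    )

print(instance_name)  # projects/my-project/locations/us-central1-a/instances/my-filestore
```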
anyscale/version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.26.19"
+__version__ = "0.26.21"
anyscale/workspace/_private/workspace_sdk.py
CHANGED
@@ -422,11 +422,18 @@ class PrivateWorkspaceSDK(WorkloadSDK):
             should_warn_delete = True
             dry_run_options.append("--delete-excluded")
 
-
-
-
-
-
+        try:
+            result = subprocess.run(
+                rsync_command + dry_run_options,
+                capture_output=True,
+                text=True,
+                check=True,
+            )
+        except subprocess.CalledProcessError as e:
+            self._logger.error(f"Error running rsync command: {e}")
+            self._logger.error(f">>> stdout: {e.stdout}")
+            self._logger.error(f">>> stderr: {e.stderr}")
+            raise RuntimeError(f"Rsync failed with return code {e.returncode}")
 
         _, deleting_files = self._parse_rsync_dry_run_output(result.stdout)
 
@@ -514,7 +521,13 @@ class PrivateWorkspaceSDK(WorkloadSDK):
         # Add -v / --verbose to the rsync command to be explicit about what is being transferred
         args += ["-v"]
 
-
+        try:
+            subprocess.run(args, check=True, capture_output=True, text=True)
+        except subprocess.CalledProcessError as e:
+            self._logger.error(f">>> Error running rsync command: {e}")
+            self._logger.error(f">>> stdout: {e.stdout}")
+            self._logger.error(f">>> stderr: {e.stderr}")
+            raise RuntimeError(f"Rsync failed with return code {e.returncode}")
 
     def pull(
         self,
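Both rsync call sites in the workspace SDK now follow the same pattern: run with `check=True`, capture output, and surface stdout/stderr on failure before raising. A generic, standalone sketch of that error-handling pattern; the command list here is an arbitrary placeholder, not the actual rsync invocation the SDK assembles.

```python
import logging
import subprocess

logger = logging.getLogger(__name__)

# Placeholder command; the SDK builds the real rsync argument list elsewhere.
args = ["rsync", "--dry-run", "-av", "./src/", "./dst/"]

try:
    result = subprocess.run(args, capture_output=True, text=True, check=True)
except subprocess.CalledProcessError as e:
    # check=True turns a non-zero exit code into CalledProcessError,
    # which carries the captured stdout/stderr for logging.
    logger.error("Error running rsync command: %s", e)
    logger.error(">>> stdout: %s", e.stdout)
    logger.error(">>> stderr: %s", e.stderr)
    raise RuntimeError(f"Rsync failed with return code {e.returncode}") from e
else:
    print(result.stdout)
```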