prefect-client 2.14.11__py3-none-any.whl → 2.14.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prefect/client/orchestration.py +27 -0
- prefect/client/schemas/objects.py +12 -1
- prefect/engine.py +79 -13
- prefect/events/actions.py +13 -2
- prefect/events/schemas.py +1 -1
- prefect/flows.py +11 -4
- prefect/infrastructure/provisioners/container_instance.py +7 -14
- prefect/input/run_input.py +0 -3
- prefect/runner/runner.py +2 -2
- prefect/runner/storage.py +39 -24
- prefect/server/api/collections_data/views/aggregate-worker-metadata.json +1535 -0
- prefect/tasks.py +28 -0
- prefect/utilities/processutils.py +7 -1
- prefect/workers/process.py +0 -4
- {prefect_client-2.14.11.dist-info → prefect_client-2.14.13.dist-info}/METADATA +38 -16
- {prefect_client-2.14.11.dist-info → prefect_client-2.14.13.dist-info}/RECORD +19 -18
- {prefect_client-2.14.11.dist-info → prefect_client-2.14.13.dist-info}/LICENSE +0 -0
- {prefect_client-2.14.11.dist-info → prefect_client-2.14.13.dist-info}/WHEEL +0 -0
- {prefect_client-2.14.11.dist-info → prefect_client-2.14.13.dist-info}/top_level.txt +0 -0
prefect/client/orchestration.py
CHANGED
@@ -92,6 +92,7 @@ from prefect.client.schemas.objects import (
     Worker,
     WorkPool,
     WorkQueue,
+    WorkQueueStatusDetail,
 )
 from prefect.client.schemas.responses import (
     DeploymentResponse,
@@ -1036,6 +1037,32 @@ class PrefectClient:
             raise
         return WorkQueue.parse_obj(response.json())

+    async def read_work_queue_status(
+        self,
+        id: UUID,
+    ) -> WorkQueueStatusDetail:
+        """
+        Read a work queue status.
+
+        Args:
+            id: the id of the work queue to load
+
+        Raises:
+            prefect.exceptions.ObjectNotFound: If request returns 404
+            httpx.RequestError: If request fails
+
+        Returns:
+            WorkQueueStatus: an instantiated WorkQueueStatus object
+        """
+        try:
+            response = await self._client.get(f"/work_queues/{id}/status")
+        except httpx.HTTPStatusError as e:
+            if e.response.status_code == status.HTTP_404_NOT_FOUND:
+                raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
+            else:
+                raise
+        return WorkQueueStatusDetail.parse_obj(response.json())
+
     async def match_work_queues(
         self,
         prefixes: List[str],
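For context, a minimal sketch of calling the new `read_work_queue_status` method from the async client; the work queue name below is a placeholder:

```python
import asyncio

from prefect import get_client


async def main():
    async with get_client() as client:
        # Look up an existing work queue by name (placeholder name)
        work_queue = await client.read_work_queue_by_name("my-work-queue")
        # New in 2.14.13: fetch the queue's status detail
        queue_status = await client.read_work_queue_status(work_queue.id)
        print(queue_status)


asyncio.run(main())
```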
prefect/client/schemas/objects.py
CHANGED
@@ -30,7 +30,7 @@ from prefect._internal.schemas.validators import (
     raise_on_name_with_banned_characters,
 )
 from prefect.client.schemas.schedules import SCHEDULE_TYPES
-from prefect.settings import PREFECT_CLOUD_API_URL
+from prefect.settings import PREFECT_CLOUD_API_URL, PREFECT_CLOUD_UI_URL
 from prefect.utilities.collections import AutoEnum, listrepr
 from prefect.utilities.names import generate_slug

@@ -110,6 +110,7 @@ class StateDetails(PrefectBaseModel):
     pause_key: str = None
     run_input_keyset: Optional[Dict[str, str]] = None
     refresh_cache: bool = None
+    retriable: bool = None


 class State(ObjectBaseModel, Generic[R]):
@@ -777,6 +778,16 @@ class Workspace(PrefectBaseModel):
             f"/workspaces/{self.workspace_id}"
         )

+    def ui_url(self) -> str:
+        """
+        Generate the UI URL for accessing this workspace
+        """
+        return (
+            f"{PREFECT_CLOUD_UI_URL.value()}"
+            f"/account/{self.account_id}"
+            f"/workspace/{self.workspace_id}"
+        )
+
     def __hash__(self):
         return hash(self.handle)

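A rough sketch of how the new `Workspace.ui_url()` helper might be used next to the existing `api_url()` when listing Cloud workspaces; this assumes an authenticated Prefect Cloud profile and uses the cloud client as in 2.14.x:

```python
import asyncio

from prefect.client.cloud import get_cloud_client


async def main():
    async with get_cloud_client() as client:
        for workspace in await client.read_workspaces():
            # api_url() existed before; ui_url() is new in this release
            print(workspace.handle, workspace.api_url(), workspace.ui_url())


asyncio.run(main())
```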
prefect/engine.py
CHANGED
@@ -952,7 +952,7 @@ async def orchestrate_flow_run(
 async def pause_flow_run(
     wait_for_input: None = None,
     flow_run_id: UUID = None,
-    timeout: int =
+    timeout: int = 3600,
     poll_interval: int = 10,
     reschedule: bool = False,
     key: str = None,
@@ -964,7 +964,7 @@ async def pause_flow_run(
 async def pause_flow_run(
     wait_for_input: Type[T],
     flow_run_id: UUID = None,
-    timeout: int =
+    timeout: int = 3600,
     poll_interval: int = 10,
     reschedule: bool = False,
     key: str = None,
@@ -988,7 +988,7 @@ async def pause_flow_run(
 async def pause_flow_run(
     wait_for_input: Optional[Type[T]] = None,
     flow_run_id: UUID = None,
-    timeout: int =
+    timeout: int = 3600,
     poll_interval: int = 10,
     reschedule: bool = False,
     key: str = None,
@@ -1011,7 +1011,7 @@ async def pause_flow_run(
             have an associated deployment and results need to be configured with the
             `persist_results` option.
         timeout: the number of seconds to wait for the flow to be resumed before
-            failing. Defaults to
+            failing. Defaults to 1 hour (3600 seconds). If the pause timeout exceeds
            any configured flow-level timeout, the flow might fail even after resuming.
         poll_interval: The number of seconds between checking whether the flow has been
            resumed. Defaults to 10 seconds.
@@ -1028,6 +1028,24 @@ async def pause_flow_run(
            resumed without providing the input, the flow will fail. If the flow is
            resumed with the input, the flow will resume and the input will be loaded
            and returned from this function.
+
+    Example:
+    ```python
+    @task
+    def task_one():
+        for i in range(3):
+            sleep(1)
+
+    @flow
+    def my_flow():
+        terminal_state = task_one.submit(return_state=True)
+        if terminal_state.type == StateType.COMPLETED:
+            print("Task one succeeded! Pausing flow run..")
+            pause_flow_run(timeout=2)
+        else:
+            print("Task one failed. Skipping pause flow run..")
+    ```
+
     """
     if flow_run_id:
         if wait_for_input is not None:
@@ -1050,11 +1068,8 @@ async def pause_flow_run(


 @inject_client
-@experimental_parameter(
-    "wait_for_input", group="flow_run_input", when=lambda y: y is not None
-)
 async def _in_process_pause(
-    timeout: int =
+    timeout: int = 3600,
     poll_interval: int = 10,
     reschedule=False,
     key: str = None,
@@ -1149,7 +1164,7 @@ async def _in_process_pause(
 @inject_client
 async def _out_of_process_pause(
     flow_run_id: UUID,
-    timeout: int =
+    timeout: int = 3600,
     reschedule: bool = True,
     key: str = None,
     client=None,
@@ -1172,7 +1187,7 @@ async def _out_of_process_pause(
 async def suspend_flow_run(
     wait_for_input: None = None,
     flow_run_id: Optional[UUID] = None,
-    timeout: Optional[int] =
+    timeout: Optional[int] = 3600,
     key: Optional[str] = None,
     client: PrefectClient = None,
 ) -> None:
@@ -1183,7 +1198,7 @@ async def suspend_flow_run(
 async def suspend_flow_run(
     wait_for_input: Type[T],
     flow_run_id: Optional[UUID] = None,
-    timeout: Optional[int] =
+    timeout: Optional[int] = 3600,
     key: Optional[str] = None,
     client: PrefectClient = None,
 ) -> T:
@@ -1192,10 +1207,13 @@ async def suspend_flow_run(

 @sync_compatible
 @inject_client
+@experimental_parameter(
+    "wait_for_input", group="flow_run_input", when=lambda y: y is not None
+)
 async def suspend_flow_run(
     wait_for_input: Optional[Type[T]] = None,
     flow_run_id: Optional[UUID] = None,
-    timeout: Optional[int] =
+    timeout: Optional[int] = 3600,
     key: Optional[str] = None,
     client: PrefectClient = None,
 ):
@@ -1214,7 +1232,7 @@ async def suspend_flow_run(
             suspend the specified flow run. If not supplied will attempt to
             suspend the current flow run.
         timeout: the number of seconds to wait for the flow to be resumed before
-            failing. Defaults to
+            failing. Defaults to 1 hour (3600 seconds). If the pause timeout
             exceeds any configured flow-level timeout, the flow might fail even
             after resuming.
         key: An optional key to prevent calling suspend more than once. This
@@ -2075,7 +2093,13 @@ async def orchestrate_task_run(
             )
             terminal_state.state_details.cache_key = cache_key

+        if terminal_state.is_failed():
+            # Defer to user to decide whether failure is retriable
+            terminal_state.state_details.retriable = (
+                await _check_task_failure_retriable(task, task_run, terminal_state)
+            )
         state = await propose_state(client, terminal_state, task_run_id=task_run.id)
+
         last_event = _emit_task_run_state_change_event(
             task_run=task_run,
             initial_state=last_state,
@@ -2672,6 +2696,48 @@ async def _run_task_hooks(task: Task, task_run: TaskRun, state: State) -> None:
             logger.info(f"Hook {hook_name!r} finished running successfully")


+async def _check_task_failure_retriable(
+    task: Task, task_run: TaskRun, state: State
+) -> bool:
+    """Run the `retry_condition_fn` callable for a task, making sure to catch and log any errors
+    that occur. If None, return True. If not callable, logs an error and returns False.
+    """
+    if task.retry_condition_fn is None:
+        return True
+
+    logger = task_run_logger(task_run)
+
+    try:
+        logger.debug(
+            f"Running `retry_condition_fn` check {task.retry_condition_fn!r} for task"
+            f" {task.name!r}"
+        )
+        if is_async_fn(task.retry_condition_fn):
+            return bool(
+                await task.retry_condition_fn(task=task, task_run=task_run, state=state)
+            )
+        else:
+            return bool(
+                await from_async.call_in_new_thread(
+                    create_call(
+                        task.retry_condition_fn,
+                        task=task,
+                        task_run=task_run,
+                        state=state,
+                    )
+                )
+            )
+    except Exception:
+        logger.error(
+            (
+                "An error was encountered while running `retry_condition_fn` check"
+                f" '{task.retry_condition_fn!r}' for task {task.name!r}"
+            ),
+            exc_info=True,
+        )
+        return False
+
+
 async def _run_flow_hooks(flow: Flow, flow_run: FlowRun, state: State) -> None:
     """Run the on_failure, on_completion, on_cancellation, and on_crashed hooks for a flow, making sure to
     catch and log any errors that occur.
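The new `retriable` state detail is driven by the `retry_condition_fn` option added to tasks in this release (see `prefect/tasks.py` in the file list above). A minimal sketch of wiring up a retry condition; the handler and task names are illustrative:

```python
from prefect import flow, task


def retry_on_connection_error(task, task_run, state) -> bool:
    """Only retry when the failure looks transient (illustrative check)."""
    try:
        state.result()  # re-raises the task's exception
    except ConnectionError:
        return True  # retriable; retries (up to `retries`) will proceed
    except Exception:
        return False  # any other failure: give up immediately
    return False


@task(retries=3, retry_condition_fn=retry_on_connection_error)
def call_external_service():
    raise ConnectionError("service temporarily unreachable")


@flow
def my_flow():
    call_external_service()
```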
prefect/events/actions.py
CHANGED
@@ -1,4 +1,4 @@
-from typing import Any, Dict, Optional
+from typing import Any, Dict, Optional, Union
 from uuid import UUID

 from prefect._internal.pydantic import HAS_PYDANTIC_V2
@@ -34,4 +34,15 @@ class RunDeployment(Action):
     deployment_id: UUID = Field(..., description="The identifier of the deployment")


-
+class SendNotification(Action):
+    """Send a notification with the given parameters"""
+
+    type: Literal["send-notification"] = "send-notification"
+    block_document_id: UUID = Field(
+        ..., description="The identifier of the notification block"
+    )
+    body: str = Field(..., description="Notification body")
+    subject: Optional[str] = Field(None, description="Notification subject")
+
+
+ActionTypes = Union[RunDeployment, SendNotification]
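A rough sketch of constructing the new `SendNotification` action; the block document ID is a placeholder and the surrounding automation plumbing is omitted:

```python
from uuid import UUID

from prefect.events.actions import SendNotification

action = SendNotification(
    block_document_id=UUID("00000000-0000-0000-0000-000000000000"),  # placeholder ID
    subject="Flow run failed",
    body="A flow run entered a Failed state.",
)
print(action.type)  # "send-notification"
```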
prefect/events/schemas.py
CHANGED
@@ -341,7 +341,7 @@ class DeploymentTrigger(ResourceTrigger):
     def owner_resource(self) -> Optional[str]:
         return f"prefect.deployment.{self._deployment_id}"

-    def actions(self) -> List[
+    def actions(self) -> List[RunDeployment]:
         assert self._deployment_id
         return [
             RunDeployment(
prefect/flows.py
CHANGED
@@ -33,6 +33,7 @@ from typing import (
 from uuid import UUID

 from prefect._vendor.fastapi.encoders import jsonable_encoder
+from typing_extensions import Self

 from prefect._internal.concurrency.api import create_call, from_async
 from prefect._internal.pydantic import HAS_PYDANTIC_V2
@@ -117,6 +118,7 @@ from prefect.utilities.visualization import (
 T = TypeVar("T")  # Generic type var for capturing the inner return type of async funcs
 R = TypeVar("R")  # The return type of the user's function
 P = ParamSpec("P")  # The parameters of the flow
+F = TypeVar("F", bound="Flow")  # The type of the flow

 logger = get_logger("flows")

@@ -373,7 +375,7 @@ class Flow(Generic[P, R]):
             List[Callable[[FlowSchema, FlowRun, State], None]]
         ] = None,
         on_crashed: Optional[List[Callable[[FlowSchema, FlowRun, State], None]]] = None,
-    ):
+    ) -> Self:
         """
         Create a new flow from the current object, updating provided options.

@@ -428,7 +430,7 @@ class Flow(Generic[P, R]):
             >>> assert state.result() == 4

         """
-
+        new_flow = Flow(
             fn=self.fn,
             name=name or self.name,
             description=description or self.description,
@@ -467,6 +469,9 @@ class Flow(Generic[P, R]):
             on_cancellation=on_cancellation or self.on_cancellation,
             on_crashed=on_crashed or self.on_crashed,
         )
+        new_flow._storage = self._storage
+        new_flow._entrypoint = self._entrypoint
+        return new_flow

     def validate_parameters(self, parameters: Dict[str, Any]) -> Dict[str, Any]:
         """
@@ -618,6 +623,8 @@ class Flow(Generic[P, R]):
         """
         from prefect.deployments.runner import RunnerDeployment

+        if not name.endswith(".py"):
+            raise_on_name_with_banned_characters(name)
         if self._storage and self._entrypoint:
             return await RunnerDeployment.from_storage(
                 storage=self._storage,
@@ -775,10 +782,10 @@ class Flow(Generic[P, R]):
     @classmethod
     @sync_compatible
     async def from_source(
-        cls,
+        cls: Type[F],
         source: Union[str, RunnerStorage, ReadableDeploymentStorage],
         entrypoint: str,
-    ) ->
+    ) -> F:
         """
         Loads a flow from a remote source.

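As a rough illustration of the `with_options` change (the new flow object now keeps the storage and entrypoint of the original), something like the following should hold; the repository URL and entrypoint are placeholders:

```python
from prefect import Flow

# Load a flow from a remote repository (placeholder URL and entrypoint)
remote_flow = Flow.from_source(
    source="https://github.com/example-org/example-repo.git",
    entrypoint="flows/etl.py:etl",
)

# with_options() now returns a Flow that still knows where it came from,
# so it can be deployed or served just like the original.
nightly = remote_flow.with_options(name="etl-nightly", retries=2)
print(nightly.name)
```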
prefect/infrastructure/provisioners/container_instance.py
CHANGED
@@ -669,8 +669,9 @@ class ContainerInstancePushProvisioner:
             registry: The registry to grant access to.
         """
         command = (
-
-            f" {
+            "az role assignment create --assignee-object-id"
+            f" {identity['principalId']} --assignee-principal-type ServicePrincipal"
+            f" --scope {registry['id']} --role AcrPull --subscription {subscription_id}"
         )
         await self.azure_cli.run_command(
             command,
@@ -830,18 +831,10 @@ class ContainerInstancePushProvisioner:
                 "The prefix must be alphanumeric and between 3-50 characters.",
                 style="red",
             )
-
-
-
-
-            )
-            if self._validate_user_input(self._identity_name):
-                break
-            else:
-                self._console.print(
-                    "The identity name must be alphanumeric and at least 3 characters.",
-                    style="red",
-                )
+        self._identity_name = prompt(
+            "Please enter a name for the identity (used for ACR access)",
+            default=self._identity_name,
+        )
         self._credentials_block_name = prompt(
             "Please enter a name for the ACI credentials block",
             default=self._credentials_block_name,
prefect/input/run_input.py
CHANGED
@@ -56,9 +56,6 @@ class RunInput(pydantic.BaseModel):
     class Config:
         extra = "forbid"

-    title: str = "Run is asking for input"
-    description: Optional[str] = None
-
     @classmethod
     @sync_compatible
     async def save(cls, keyset: Keyset, flow_run_id: Optional[UUID] = None):
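For context, `RunInput` subclasses are what the experimental `wait_for_input` parameter (see `prefect/engine.py` above) consumes. A minimal sketch, with field names chosen purely for illustration:

```python
from prefect import flow
from prefect.engine import pause_flow_run
from prefect.input import RunInput


class ApprovalInput(RunInput):
    # Illustrative fields; any pydantic-style fields work
    approver: str
    approve: bool


@flow
def release_flow():
    # Pauses until the run is resumed with an ApprovalInput payload
    decision = pause_flow_run(wait_for_input=ApprovalInput, timeout=3600)
    if decision.approve:
        print(f"Approved by {decision.approver}")
```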
prefect/runner/runner.py
CHANGED
@@ -86,7 +86,7 @@ from prefect.settings import (
 )
 from prefect.states import Crashed, Pending, exception_to_failed_state
 from prefect.utilities.asyncutils import is_async_fn, sync_compatible
-from prefect.utilities.processutils import run_process
+from prefect.utilities.processutils import _register_signal, run_process
 from prefect.utilities.services import critical_service_loop

 __all__ = ["Runner", "serve"]
@@ -337,7 +337,7 @@ class Runner:
            runner.start()
            ```
         """
-
+        _register_signal(signal.SIGTERM, self.handle_sigterm)

         webserver = webserver if webserver is not None else self.webserver

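The SIGTERM handler is registered when a runner starts, which is what `serve` drives under the hood; a minimal sketch (flow name and interval are illustrative):

```python
from prefect import flow, serve


@flow
def heartbeat():
    print("still alive")


if __name__ == "__main__":
    # serve() starts a Runner; with this release the runner registers a
    # SIGTERM handler (handle_sigterm) for orderly shutdown.
    serve(heartbeat.to_deployment(name="heartbeat", interval=60))
```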
prefect/runner/storage.py
CHANGED
@@ -1,3 +1,4 @@
+import shutil
 import subprocess
 from copy import deepcopy
 from pathlib import Path
@@ -218,35 +219,49 @@ class GitRepository:
             if self._include_submodules:
                 cmd += ["--recurse-submodules"]
             cmd += ["--depth", "1"]
-
+            try:
+                await run_process(cmd, cwd=self.destination)
+                self._logger.debug("Successfully pulled latest changes")
+            except subprocess.CalledProcessError as exc:
+                self._logger.error(
+                    f"Failed to pull latest changes with exit code {exc}"
+                )
+                shutil.rmtree(self.destination)
+                await self._clone_repo()
+
         else:
-            self.
-            # Clone the repository if it doesn't exist at the destination
+            await self._clone_repo()

-
+    async def _clone_repo(self):
+        """
+        Clones the repository into the local destination.
+        """
+        self._logger.debug("Cloning repository %s", self._url)

-
-            "git",
-            "clone",
-            repository_url,
-        ]
-        if self._branch:
-            cmd += ["--branch", self._branch]
-        if self._include_submodules:
-            cmd += ["--recurse-submodules"]
+        repository_url = self._repository_url_with_credentials

-
-
+        cmd = [
+            "git",
+            "clone",
+            repository_url,
+        ]
+        if self._branch:
+            cmd += ["--branch", self._branch]
+        if self._include_submodules:
+            cmd += ["--recurse-submodules"]

-
-
-
-
-
-
-
-
-
+        # Limit git history and set path to clone to
+        cmd += ["--depth", "1", str(self.destination)]
+
+        try:
+            await run_process(cmd)
+        except subprocess.CalledProcessError as exc:
+            # Hide the command used to avoid leaking the access token
+            exc_chain = None if self._credentials else exc
+            raise RuntimeError(
+                f"Failed to clone repository {self._url!r} with exit code"
+                f" {exc.returncode}."
+            ) from exc_chain

     def __eq__(self, __value) -> bool:
         if isinstance(__value, GitRepository):