prefect-client 3.0.0rc15-py3-none-any.whl → 3.0.0rc17-py3-none-any.whl

This diff compares two publicly available package versions as they were released to a supported public registry. It is provided for informational purposes only and reflects the changes between those versions as published.
prefect/flows.py CHANGED
@@ -47,16 +47,12 @@ from pydantic.v1.errors import ConfigError  # TODO
 from rich.console import Console
 from typing_extensions import Literal, ParamSpec, Self

-from prefect._internal.compatibility.deprecated import (
-    deprecated_parameter,
-)
 from prefect._internal.concurrency.api import create_call, from_async
 from prefect.blocks.core import Block
 from prefect.client.orchestration import get_client
 from prefect.client.schemas.actions import DeploymentScheduleCreate
 from prefect.client.schemas.objects import Flow as FlowSchema
 from prefect.client.schemas.objects import FlowRun
-from prefect.client.schemas.schedules import SCHEDULE_TYPES
 from prefect.client.utilities import client_injector
 from prefect.docker.docker_image import DockerImage
 from prefect.events import DeploymentTriggerTypes, TriggerTypes
@@ -615,30 +611,23 @@ class Flow(Generic[P, R]):
             # do not serialize the bound self object
             if self.ismethod and value is self.fn.__prefect_self__:
                 continue
+            if isinstance(value, (PrefectFuture, State)):
+                # Don't call jsonable_encoder() on a PrefectFuture or State to
+                # avoid triggering a __getitem__ call
+                serialized_parameters[key] = f"<{type(value).__name__}>"
+                continue
             try:
                 serialized_parameters[key] = jsonable_encoder(value)
             except (TypeError, ValueError):
                 logger.debug(
-                    f"Parameter {key!r} for flow {self.name!r} is of unserializable "
-                    f"type {type(value).__name__!r} and will not be stored "
+                    f"Parameter {key!r} for flow {self.name!r} is unserializable. "
+                    f"Type {type(value).__name__!r} and will not be stored "
                     "in the backend."
                 )
                 serialized_parameters[key] = f"<{type(value).__name__}>"
         return serialized_parameters

     @sync_compatible
-    @deprecated_parameter(
-        "schedule",
-        start_date="Mar 2024",
-        when=lambda p: p is not None,
-        help="Use `schedules` instead.",
-    )
-    @deprecated_parameter(
-        "is_schedule_active",
-        start_date="Mar 2024",
-        when=lambda p: p is not None,
-        help="Use `paused` instead.",
-    )
     async def to_deployment(
         self,
         name: str,
@@ -654,8 +643,6 @@ class Flow(Generic[P, R]):
         rrule: Optional[Union[Iterable[str], str]] = None,
         paused: Optional[bool] = None,
         schedules: Optional[List["FlexibleScheduleList"]] = None,
-        schedule: Optional[SCHEDULE_TYPES] = None,
-        is_schedule_active: Optional[bool] = None,
         parameters: Optional[dict] = None,
         triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         description: Optional[str] = None,
@@ -679,10 +666,6 @@ class Flow(Generic[P, R]):
             paused: Whether or not to set this deployment as paused.
             schedules: A list of schedule objects defining when to execute runs of this deployment.
                 Used to define multiple schedules or additional scheduling options such as `timezone`.
-            schedule: A schedule object defining when to execute runs of this deployment.
-            is_schedule_active: Whether or not to set the schedule for this deployment as active. If
-                not provided when creating a deployment, the schedule will be set as active. If not
-                provided when updating a deployment, the schedule's activation will not be changed.
             parameters: A dictionary of default parameter values to pass to runs of this deployment.
             triggers: A list of triggers that will kick off runs of this deployment.
             description: A description for the created deployment. Defaults to the flow's
@@ -735,8 +718,6 @@ class Flow(Generic[P, R]):
                 rrule=rrule,
                 paused=paused,
                 schedules=schedules,
-                schedule=schedule,
-                is_schedule_active=is_schedule_active,
                 tags=tags,
                 triggers=triggers,
                 parameters=parameters or {},
@@ -756,8 +737,6 @@ class Flow(Generic[P, R]):
                 rrule=rrule,
                 paused=paused,
                 schedules=schedules,
-                schedule=schedule,
-                is_schedule_active=is_schedule_active,
                 tags=tags,
                 triggers=triggers,
                 parameters=parameters or {},
@@ -815,8 +794,6 @@ class Flow(Generic[P, R]):
         rrule: Optional[Union[Iterable[str], str]] = None,
         paused: Optional[bool] = None,
         schedules: Optional["FlexibleScheduleList"] = None,
-        schedule: Optional[SCHEDULE_TYPES] = None,
-        is_schedule_active: Optional[bool] = None,
         triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         parameters: Optional[dict] = None,
         description: Optional[str] = None,
@@ -846,11 +823,6 @@ class Flow(Generic[P, R]):
             paused: Whether or not to set this deployment as paused.
             schedules: A list of schedule objects defining when to execute runs of this deployment.
                 Used to define multiple schedules or additional scheduling options like `timezone`.
-            schedule: A schedule object defining when to execute runs of this deployment. Used to
-                define additional scheduling options such as `timezone`.
-            is_schedule_active: Whether or not to set the schedule for this deployment as active. If
-                not provided when creating a deployment, the schedule will be set as active. If not
-                provided when updating a deployment, the schedule's activation will not be changed.
             parameters: A dictionary of default parameter values to pass to runs of this deployment.
             description: A description for the created deployment. Defaults to the flow's
                 description if not provided.
@@ -914,8 +886,6 @@ class Flow(Generic[P, R]):
             rrule=rrule,
             paused=paused,
             schedules=schedules,
-            schedule=schedule,
-            is_schedule_active=is_schedule_active,
             parameters=parameters,
             description=description,
             tags=tags,
@@ -1085,8 +1055,6 @@ class Flow(Generic[P, R]):
         rrule: Optional[str] = None,
         paused: Optional[bool] = None,
         schedules: Optional[List[DeploymentScheduleCreate]] = None,
-        schedule: Optional[SCHEDULE_TYPES] = None,
-        is_schedule_active: Optional[bool] = None,
         triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         parameters: Optional[dict] = None,
         description: Optional[str] = None,
@@ -1133,11 +1101,6 @@ class Flow(Generic[P, R]):
             paused: Whether or not to set this deployment as paused.
             schedules: A list of schedule objects defining when to execute runs of this deployment.
                 Used to define multiple schedules or additional scheduling options like `timezone`.
-            schedule: A schedule object defining when to execute runs of this deployment. Used to
-                define additional scheduling options like `timezone`.
-            is_schedule_active: Whether or not to set the schedule for this deployment as active. If
-                not provided when creating a deployment, the schedule will be set as active. If not
-                provided when updating a deployment, the schedule's activation will not be changed.
             parameters: A dictionary of default parameter values to pass to runs of this deployment.
             description: A description for the created deployment. Defaults to the flow's
                 description if not provided.
@@ -1213,8 +1176,6 @@ class Flow(Generic[P, R]):
             rrule=rrule,
             schedules=schedules,
             paused=paused,
-            schedule=schedule,
-            is_schedule_active=is_schedule_active,
             triggers=triggers,
             parameters=parameters,
             description=description,
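The hunks above remove the `schedule` and `is_schedule_active` parameters (deprecated since March 2024, per the removed decorators) from `Flow.to_deployment`, `Flow.serve`, and `Flow.deploy`; the remaining `cron`/`interval`/`rrule`/`schedules` and `paused` parameters cover the same use cases. A minimal migration sketch using only parameters visible in the signatures above; the flow body and deployment name are hypothetical:

    from prefect import flow


    @flow
    def nightly_etl():  # hypothetical example flow
        ...


    # Previously: nightly_etl.to_deployment(..., schedule=<one schedule object>, is_schedule_active=True)
    deployment = nightly_etl.to_deployment(
        name="nightly",    # hypothetical deployment name
        cron="0 2 * * *",  # replaces a single schedule object passed via `schedule=`
        paused=False,      # replaces `is_schedule_active=True`
    )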
prefect/runner/runner.py CHANGED
@@ -67,7 +67,6 @@ from prefect.client.schemas.filters import (
 )
 from prefect.client.schemas.objects import Flow as APIFlow
 from prefect.client.schemas.objects import FlowRun, State, StateType
-from prefect.client.schemas.schedules import SCHEDULE_TYPES
 from prefect.events import DeploymentTriggerTypes, TriggerTypes
 from prefect.events.related import tags_as_related_resources
 from prefect.events.schemas.events import RelatedResource
@@ -224,8 +223,6 @@ class Runner:
         rrule: Optional[Union[Iterable[str], str]] = None,
         paused: Optional[bool] = None,
         schedules: Optional["FlexibleScheduleList"] = None,
-        schedule: Optional[SCHEDULE_TYPES] = None,
-        is_schedule_active: Optional[bool] = None,
         parameters: Optional[dict] = None,
         triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         description: Optional[str] = None,
@@ -248,11 +245,6 @@ class Runner:
                 or a timedelta object. If a number is given, it will be interpreted as seconds.
             cron: A cron schedule of when to execute runs of this flow.
             rrule: An rrule schedule of when to execute runs of this flow.
-            schedule: A schedule object of when to execute runs of this flow. Used for
-                advanced scheduling options like timezone.
-            is_schedule_active: Whether or not to set the schedule for this deployment as active. If
-                not provided when creating a deployment, the schedule will be set as active. If not
-                provided when updating a deployment, the schedule's activation will not be changed.
             triggers: A list of triggers that should kick of a run of this flow.
             parameters: A dictionary of default parameter values to pass to runs of this flow.
             description: A description for the created deployment. Defaults to the flow's
@@ -277,9 +269,7 @@ class Runner:
             cron=cron,
             rrule=rrule,
             schedules=schedules,
-            schedule=schedule,
             paused=paused,
-            is_schedule_active=is_schedule_active,
             triggers=triggers,
             parameters=parameters,
             description=description,
@@ -12,6 +12,7 @@ Available attributes:
     - `scheduled_start_time`: the flow run's expected scheduled start time; defaults to now if not present
     - `name`: the name of the flow run
     - `flow_name`: the name of the flow
+    - `flow_version`: the version of the flow
     - `parameters`: the parameters that were passed to this run; note that these do not necessarily
         include default values set on the flow function, only the parameter values explicitly passed for the run
     - `parent_flow_run_id`: the ID of the flow run that triggered this run, if any
@@ -35,6 +36,7 @@ __all__ = [
     "scheduled_start_time",
     "name",
     "flow_name",
+    "flow_version",
     "parameters",
     "parent_flow_run_id",
     "parent_deployment_id",
@@ -119,7 +121,7 @@ async def _get_flow_from_run(flow_run_id):
         return await client.read_flow(flow_run.flow_id)


-def get_id() -> str:
+def get_id() -> Optional[str]:
     flow_run_ctx = FlowRunContext.get()
     task_run_ctx = TaskRunContext.get()
     if flow_run_ctx is not None:
@@ -190,6 +192,21 @@ def get_flow_name() -> Optional[str]:
         return flow_run_ctx.flow.name


+def get_flow_version() -> Optional[str]:
+    flow_run_ctx = FlowRunContext.get()
+    run_id = get_id()
+    if flow_run_ctx is None and run_id is None:
+        return None
+    elif flow_run_ctx is None:
+        flow = from_sync.call_soon_in_loop_thread(
+            create_call(_get_flow_from_run, run_id)
+        ).result()
+
+        return flow.version
+    else:
+        return flow_run_ctx.flow.version
+
+
 def get_scheduled_start_time() -> pendulum.DateTime:
     flow_run_ctx = FlowRunContext.get()
     run_id = get_id()
@@ -313,4 +330,5 @@ FIELDS = {
     "run_count": get_run_count,
     "api_url": get_flow_run_api_url,
     "ui_url": get_flow_run_ui_url,
+    "flow_version": get_flow_version,
 }
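These hunks wire a new `flow_version` attribute into the runtime accessors: the module docstring, `__all__`, a `get_flow_version()` helper, and the `FIELDS` lookup table. A short usage sketch, assuming the hunks belong to the `prefect.runtime.flow_run` module (its file header is not shown above) and using a hypothetical flow:

    from prefect import flow
    from prefect.runtime import flow_run


    @flow(version="1.2.3")  # hypothetical version string
    def report_runtime_info():
        # Attribute access resolves through FIELDS["flow_version"] -> get_flow_version();
        # it returns None when called outside of a flow or task run context.
        print(flow_run.flow_name, flow_run.flow_version)


    if __name__ == "__main__":
        report_runtime_info()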
prefect/settings.py CHANGED
@@ -323,7 +323,11 @@ def template_with_settings(*upstream_settings: Setting) -> Callable[["Settings",
             setting.name: setting.value_from(settings) for setting in upstream_settings
         }
         template = string.Template(str(value))
-        return original_type(template.substitute(template_values))
+        # Note the use of `safe_substitute` to avoid raising an exception if a
+        # template value is missing. In this case, template values will be left
+        # as-is in the string. Using `safe_substitute` prevents us raising when
+        # the DB password contains a `$` character.
+        return original_type(template.safe_substitute(template_values))

     return templater

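The templating change swaps `Template.substitute` for `Template.safe_substitute`, so a literal `$` in an upstream setting value (for example a database password) no longer raises; unresolved placeholders are simply left in place. A stdlib-only illustration of the difference, with a hypothetical connection string:

    import string

    # A templated setting whose value happens to contain a `$` (e.g. a DB password).
    template = string.Template("postgresql://user:p4$sword@localhost/prefect")

    try:
        template.substitute({})          # old behavior: raises KeyError('sword')
    except KeyError as exc:
        print(f"substitute() raised KeyError for placeholder {exc}")

    # New behavior: the unknown placeholder is left untouched.
    print(template.safe_substitute({}))  # postgresql://user:p4$sword@localhost/prefect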
prefect/task_engine.py CHANGED
@@ -5,7 +5,6 @@ import time
 from asyncio import CancelledError
 from contextlib import ExitStack, asynccontextmanager, contextmanager
 from dataclasses import dataclass, field
-from functools import wraps
 from textwrap import dedent
 from typing import (
     Any,
@@ -212,6 +211,9 @@ class BaseTaskRunEngine(Generic[P, R]):
         return task_run.state.is_running() or task_run.state.is_scheduled()

     def log_finished_message(self):
+        if not self.task_run:
+            return
+
         # If debugging, use the more complete `repr` than the usual `str` description
         display_state = repr(self.state) if PREFECT_DEBUG_MODE else str(self.state)
         level = logging.INFO if self.state.is_completed() else logging.ERROR
@@ -231,11 +233,11 @@ class BaseTaskRunEngine(Generic[P, R]):

            @task
            def say_hello(name):
-               print f"Hello, {name}!"
+               print(f"Hello, {name}!")

            @flow
            def example_flow():
-               future = say_hello.submit(name="Marvin)
+               future = say_hello.submit(name="Marvin")
                future.wait()

            example_flow()
@@ -364,7 +366,7 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
            self.task_run.run_count += 1

            flow_run_context = FlowRunContext.get()
-           if flow_run_context:
+           if flow_run_context and flow_run_context.flow_run:
                # Carry forward any task run information from the flow run
                flow_run = flow_run_context.flow_run
                self.task_run.flow_run_run_count = flow_run.run_count
@@ -496,15 +498,8 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
        )
        transaction.stage(
            terminal_state.data,
-           on_rollback_hooks=[self.handle_rollback]
-           + [
-               _with_transaction_hook_logging(hook, "rollback", self.logger)
-               for hook in self.task.on_rollback_hooks
-           ],
-           on_commit_hooks=[
-               _with_transaction_hook_logging(hook, "commit", self.logger)
-               for hook in self.task.on_commit_hooks
-           ],
+           on_rollback_hooks=[self.handle_rollback] + self.task.on_rollback_hooks,
+           on_commit_hooks=self.task.on_commit_hooks,
        )
        if transaction.is_committed():
            terminal_state.name = "Cached"
@@ -630,21 +625,24 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):

             self.logger = task_run_logger(task_run=self.task_run, task=self.task)  # type: ignore

-            if not PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION:
-                # update the task run name if necessary
-                if not self._task_name_set and self.task.task_run_name:
-                    task_run_name = _resolve_custom_task_run_name(
-                        task=self.task, parameters=self.parameters
-                    )
+            # update the task run name if necessary
+            if not self._task_name_set and self.task.task_run_name:
+                task_run_name = _resolve_custom_task_run_name(
+                    task=self.task, parameters=self.parameters
+                )
+
+                if not PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION:
+                    # update the task run name if necessary
                     self.client.set_task_run_name(
                         task_run_id=self.task_run.id, name=task_run_name
                     )
-                    self.logger.extra["task_run_name"] = task_run_name
-                    self.logger.debug(
-                        f"Renamed task run {self.task_run.name!r} to {task_run_name!r}"
-                    )
-                    self.task_run.name = task_run_name
-                    self._task_name_set = True
+
+                self.logger.extra["task_run_name"] = task_run_name
+                self.logger.debug(
+                    f"Renamed task run {self.task_run.name!r} to {task_run_name!r}"
+                )
+                self.task_run.name = task_run_name
+                self._task_name_set = True
             yield

     @contextmanager
@@ -663,21 +661,6 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
            self._is_started = True
            try:
                if PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION:
-                   from prefect.utilities.engine import (
-                       _resolve_custom_task_run_name,
-                   )
-
-                   task_run_name = (
-                       _resolve_custom_task_run_name(
-                           task=self.task, parameters=self.parameters
-                       )
-                       if self.task.task_run_name
-                       else None
-                   )
-
-                   if self.task_run and task_run_name:
-                       self.task_run.name = task_run_name
-
                    if not self.task_run:
                        self.task_run = run_coro_as_sync(
                            self.task.create_local_run(
@@ -687,7 +670,6 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
                                parent_task_run_context=TaskRunContext.get(),
                                wait_for=self.wait_for,
                                extra_task_inputs=dependencies,
-                               task_run_name=task_run_name,
                            )
                        )
                        # Emit an event to capture that the task run was in the `PENDING` state.
@@ -1068,15 +1050,8 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
        )
        transaction.stage(
            terminal_state.data,
-           on_rollback_hooks=[self.handle_rollback]
-           + [
-               _with_transaction_hook_logging(hook, "rollback", self.logger)
-               for hook in self.task.on_rollback_hooks
-           ],
-           on_commit_hooks=[
-               _with_transaction_hook_logging(hook, "commit", self.logger)
-               for hook in self.task.on_commit_hooks
-           ],
+           on_rollback_hooks=[self.handle_rollback] + self.task.on_rollback_hooks,
+           on_commit_hooks=self.task.on_commit_hooks,
        )
        if transaction.is_committed():
            terminal_state.name = "Cached"
@@ -1200,21 +1175,21 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):

             self.logger = task_run_logger(task_run=self.task_run, task=self.task)  # type: ignore

-            if not PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION:
-                # update the task run name if necessary
-                if not self._task_name_set and self.task.task_run_name:
-                    task_run_name = _resolve_custom_task_run_name(
-                        task=self.task, parameters=self.parameters
-                    )
+            if not self._task_name_set and self.task.task_run_name:
+                task_run_name = _resolve_custom_task_run_name(
+                    task=self.task, parameters=self.parameters
+                )
+                if not PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION:
+                    # update the task run name if necessary
                     await self.client.set_task_run_name(
                         task_run_id=self.task_run.id, name=task_run_name
                     )
-                    self.logger.extra["task_run_name"] = task_run_name
-                    self.logger.debug(
-                        f"Renamed task run {self.task_run.name!r} to {task_run_name!r}"
-                    )
-                    self.task_run.name = task_run_name
-                    self._task_name_set = True
+                self.logger.extra["task_run_name"] = task_run_name
+                self.logger.debug(
+                    f"Renamed task run {self.task_run.name!r} to {task_run_name!r}"
+                )
+                self.task_run.name = task_run_name
+                self._task_name_set = True
             yield

     @asynccontextmanager
@@ -1233,21 +1208,6 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
            self._is_started = True
            try:
                if PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION:
-                   from prefect.utilities.engine import (
-                       _resolve_custom_task_run_name,
-                   )
-
-                   task_run_name = (
-                       _resolve_custom_task_run_name(
-                           task=self.task, parameters=self.parameters
-                       )
-                       if self.task.task_run_name
-                       else None
-                   )
-
-                   if self.task_run and task_run_name:
-                       self.task_run.name = task_run_name
-
                    if not self.task_run:
                        self.task_run = await self.task.create_local_run(
                            id=task_run_id,
@@ -1256,7 +1216,6 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
                            parent_task_run_context=TaskRunContext.get(),
                            wait_for=self.wait_for,
                            extra_task_inputs=dependencies,
-                           task_run_name=task_run_name,
                        )
                        # Emit an event to capture that the task run was in the `PENDING` state.
                        self._last_event = emit_task_run_state_change_event(
@@ -1622,28 +1581,3 @@ def run_task(
         return run_task_async(**kwargs)
     else:
         return run_task_sync(**kwargs)
-
-
-def _with_transaction_hook_logging(
-    hook: Callable[[Transaction], None],
-    hook_type: Literal["rollback", "commit"],
-    logger: logging.Logger,
-) -> Callable[[Transaction], None]:
-    @wraps(hook)
-    def _hook(txn: Transaction) -> None:
-        hook_name = _get_hook_name(hook)
-        logger.info(f"Running {hook_type} hook {hook_name!r}")
-
-        try:
-            hook(txn)
-        except Exception as exc:
-            logger.error(
-                f"An error was encountered while running {hook_type} hook {hook_name!r}",
-            )
-            raise exc
-        else:
-            logger.info(
-                f"{hook_type.capitalize()} hook {hook_name!r} finished running successfully"
-            )
-
-    return _hook
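Both engines now resolve a templated `task_run_name` before deciding whether to push the rename to the API (the API call happens only when client-side task orchestration is disabled), and the transaction hook logging wrapper is gone in favor of `Transaction.run_hook` (see `prefect/transactions.py` below). A hedged sketch of the user-facing feature the renaming code serves, with hypothetical task and parameter names:

    from prefect import flow, task


    @task(task_run_name="process-{item}")  # templated against the call's parameters
    def process(item: str) -> str:
        return item.upper()


    @flow
    def example_flow():
        # Runs are renamed per call, e.g. "process-apples" and "process-pears".
        futures = [process.submit(item=item) for item in ("apples", "pears")]
        return [future.result() for future in futures]


    if __name__ == "__main__":
        example_flow()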
prefect/tasks.py CHANGED
@@ -814,7 +814,6 @@ class Task(Generic[P, R]):
         wait_for: Optional[Iterable[PrefectFuture]] = None,
         extra_task_inputs: Optional[Dict[str, Set[TaskRunInput]]] = None,
         deferred: bool = False,
-        task_run_name: Optional[str] = None,
     ) -> TaskRun:
         if not PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION:
             raise RuntimeError(
@@ -839,12 +838,12 @@ class Task(Generic[P, R]):
         async with client:
             if not flow_run_context:
                 dynamic_key = f"{self.task_key}-{str(uuid4().hex)}"
-                task_run_name = task_run_name or self.name
+                task_run_name = self.name
             else:
                 dynamic_key = _dynamic_key_for_task_run(
                     context=flow_run_context, task=self
                 )
-                task_run_name = task_run_name or f"{self.name}-{dynamic_key}"
+                task_run_name = f"{self.name}-{dynamic_key}"

             if deferred:
                 state = Scheduled()
prefect/transactions.py CHANGED
@@ -1,9 +1,11 @@
+import copy
 import logging
 from contextlib import contextmanager
 from contextvars import ContextVar, Token
 from typing import (
     Any,
     Callable,
+    Dict,
     Generator,
     List,
     Optional,
@@ -11,7 +13,7 @@ from typing import (
     Union,
 )

-from pydantic import Field
+from pydantic import Field, PrivateAttr
 from typing_extensions import Self

 from prefect.context import ContextModel, FlowRunContext, TaskRunContext
@@ -24,8 +26,10 @@ from prefect.results import (
     ResultFactory,
     get_default_result_storage,
 )
+from prefect.utilities.annotations import NotSet
 from prefect.utilities.asyncutils import run_coro_as_sync
 from prefect.utilities.collections import AutoEnum
+from prefect.utilities.engine import _get_hook_name


 class IsolationLevel(AutoEnum):
@@ -63,9 +67,20 @@ class Transaction(ContextModel):
     )
     overwrite: bool = False
     logger: Union[logging.Logger, logging.LoggerAdapter, None] = None
+    _stored_values: Dict[str, Any] = PrivateAttr(default_factory=dict)
     _staged_value: Any = None
     __var__: ContextVar = ContextVar("transaction")

+    def set(self, name: str, value: Any) -> None:
+        self._stored_values[name] = value
+
+    def get(self, name: str, default: Any = NotSet) -> Any:
+        if name not in self._stored_values:
+            if default is not NotSet:
+                return default
+            raise ValueError(f"Could not retrieve value for unknown key: {name}")
+        return self._stored_values.get(name)
+
     def is_committed(self) -> bool:
         return self.state == TransactionState.COMMITTED

@@ -93,6 +108,7 @@ class Transaction(ContextModel):
            # either inherit from parent or set a default of eager
            if parent:
                self.commit_mode = parent.commit_mode
+               self._stored_values = copy.deepcopy(parent._stored_values)
            else:
                self.commit_mode = CommitMode.LAZY

@@ -183,7 +199,7 @@ class Transaction(ContextModel):
                child.commit()

            for hook in self.on_commit_hooks:
-               hook(self)
+               self.run_hook(hook, "commit")

            if self.store and self.key:
                self.store.write(key=self.key, value=self._staged_value)
@@ -198,6 +214,22 @@ class Transaction(ContextModel):
            self.rollback()
            return False

+    def run_hook(self, hook, hook_type: str) -> None:
+        hook_name = _get_hook_name(hook)
+        self.logger.info(f"Running {hook_type} hook {hook_name!r}")
+
+        try:
+            hook(self)
+        except Exception as exc:
+            self.logger.error(
+                f"An error was encountered while running {hook_type} hook {hook_name!r}",
+            )
+            raise exc
+        else:
+            self.logger.info(
+                f"{hook_type.capitalize()} hook {hook_name!r} finished running successfully"
+            )
+
     def stage(
         self,
         value: BaseResult,
@@ -222,7 +254,7 @@ class Transaction(ContextModel):

        try:
            for hook in reversed(self.on_rollback_hooks):
-               hook(self)
+               self.run_hook(hook, "rollback")

            self.state = TransactionState.ROLLED_BACK

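`Transaction` gains a small key/value store (`set`/`get` backed by the private `_stored_values`, which child transactions deep-copy from their parent) and a `run_hook` helper that now owns the commit/rollback hook logging formerly wrapped in `task_engine.py`. A usage sketch of the `set`/`get` semantics shown above, assuming the `transaction` context manager exported by `prefect.transactions`:

    from prefect.transactions import transaction

    with transaction() as txn:
        txn.set("attempts", 1)
        print(txn.get("attempts"))            # -> 1
        print(txn.get("missing", default=0))  # -> 0; the default short-circuits the lookup

        with transaction() as child:
            # Child transactions deep-copy the parent's stored values on entry.
            print(child.get("attempts"))      # -> 1

        try:
            txn.get("missing")                # no default -> ValueError
        except ValueError as exc:
            print(exc)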
prefect/workers/base.py CHANGED
@@ -930,7 +930,12 @@ class BaseWorker(abc.ABC):

        deployment_vars = deployment.job_variables or {}
        flow_run_vars = flow_run.job_variables or {}
-       job_variables = {**deployment_vars, **flow_run_vars}
+       job_variables = {**deployment_vars}
+
+       # merge environment variables carefully, otherwise full override
+       if isinstance(job_variables.get("env"), dict):
+           job_variables["env"].update(flow_run_vars.pop("env", {}))
+       job_variables.update(flow_run_vars)

        configuration = await self.job_configuration.from_template_and_values(
            base_job_template=self._work_pool.base_job_template,
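The worker now merges the `env` job variable key-by-key instead of letting the flow run's `env` replace the deployment's entire mapping; every other key still follows "flow run overrides deployment". A standalone sketch of the merge with hypothetical values:

    # Hypothetical stand-ins for deployment.job_variables and flow_run.job_variables.
    deployment_vars = {"env": {"LOG_LEVEL": "INFO", "REGION": "us-east-1"}, "cpu": 1}
    flow_run_vars = {"env": {"LOG_LEVEL": "DEBUG"}, "memory": 512}

    job_variables = {**deployment_vars}
    # merge environment variables carefully, otherwise full override
    if isinstance(job_variables.get("env"), dict):
        job_variables["env"].update(flow_run_vars.pop("env", {}))
    job_variables.update(flow_run_vars)

    print(job_variables)
    # Old behavior: "env" would be {"LOG_LEVEL": "DEBUG"} and REGION would be lost.
    # New behavior: {"env": {"LOG_LEVEL": "DEBUG", "REGION": "us-east-1"}, "cpu": 1, "memory": 512}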
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: prefect-client
-Version: 3.0.0rc15
+Version: 3.0.0rc17
 Summary: Workflow orchestration and management.
 Home-page: https://www.prefect.io
 Author: Prefect Technologies, Inc.