prefect-client 3.1.13__py3-none-any.whl → 3.1.15__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
prefect/events/filters.py CHANGED
@@ -1,7 +1,6 @@
- from typing import List, Optional, Tuple, cast
+ from typing import Optional
  from uuid import UUID

- import pendulum
  from pydantic import Field

  from prefect._internal.schemas.bases import PrefectBaseModel
@@ -23,7 +22,7 @@ class AutomationFilterCreated(PrefectBaseModel):
  class AutomationFilterName(PrefectBaseModel):
      """Filter by `Automation.created`."""

-     any_: Optional[List[str]] = Field(
+     any_: Optional[list[str]] = Field(
          default=None,
          description="Only include automations with names that match any of these strings",
      )
@@ -41,8 +40,8 @@ class AutomationFilter(PrefectBaseModel):
  class EventDataFilter(PrefectBaseModel, extra="forbid"):  # type: ignore[call-arg]
      """A base class for filtering event data."""

-     def get_filters(self) -> List["EventDataFilter"]:
-         filters: List["EventDataFilter"] = [
+     def get_filters(self) -> list["EventDataFilter"]:
+         filters: list["EventDataFilter"] = [
              filter
              for filter in [getattr(self, name) for name in self.model_fields]
              if isinstance(filter, EventDataFilter)
@@ -60,14 +59,11 @@ class EventDataFilter(PrefectBaseModel, extra="forbid"): # type: ignore[call-ar

  class EventOccurredFilter(EventDataFilter):
      since: DateTime = Field(
-         default_factory=lambda: cast(
-             DateTime,
-             pendulum.now("UTC").start_of("day").subtract(days=180),
-         ),
+         default_factory=lambda: DateTime.now("UTC").start_of("day").subtract(days=180),
          description="Only include events after this time (inclusive)",
      )
      until: DateTime = Field(
-         default_factory=lambda: cast(DateTime, pendulum.now("UTC")),
+         default_factory=lambda: DateTime.now("UTC"),
          description="Only include events prior to this time (inclusive)",
      )

@@ -76,18 +72,18 @@ class EventOccurredFilter(EventDataFilter):


  class EventNameFilter(EventDataFilter):
-     prefix: Optional[List[str]] = Field(
+     prefix: Optional[list[str]] = Field(
          default=None, description="Only include events matching one of these prefixes"
      )
-     exclude_prefix: Optional[List[str]] = Field(
+     exclude_prefix: Optional[list[str]] = Field(
          default=None, description="Exclude events matching one of these prefixes"
      )

-     name: Optional[List[str]] = Field(
+     name: Optional[list[str]] = Field(
          default=None,
          description="Only include events matching one of these names exactly",
      )
-     exclude_name: Optional[List[str]] = Field(
+     exclude_name: Optional[list[str]] = Field(
          default=None, description="Exclude events matching one of these names exactly"
      )

@@ -112,20 +108,20 @@ class EventNameFilter(EventDataFilter):


  class EventResourceFilter(EventDataFilter):
-     id: Optional[List[str]] = Field(
-         None, description="Only include events for resources with these IDs"
+     id: Optional[list[str]] = Field(
+         default=None, description="Only include events for resources with these IDs"
      )
-     id_prefix: Optional[List[str]] = Field(
-         None,
+     id_prefix: Optional[list[str]] = Field(
+         default=None,
          description=(
              "Only include events for resources with IDs starting with these prefixes."
          ),
      )
      labels: Optional[ResourceSpecification] = Field(
-         None, description="Only include events for resources with these labels"
+         default=None, description="Only include events for resources with these labels"
      )
      distinct: bool = Field(
-         False,
+         default=False,
          description="Only include events for distinct resources",
      )

@@ -148,35 +144,39 @@ class EventResourceFilter(EventDataFilter):


  class EventRelatedFilter(EventDataFilter):
-     id: Optional[List[str]] = Field(
-         None, description="Only include events for related resources with these IDs"
+     id: Optional[list[str]] = Field(
+         default=None,
+         description="Only include events for related resources with these IDs",
      )
-     role: Optional[List[str]] = Field(
-         None, description="Only include events for related resources in these roles"
+     role: Optional[list[str]] = Field(
+         default=None,
+         description="Only include events for related resources in these roles",
      )
-     resources_in_roles: Optional[List[Tuple[str, str]]] = Field(
-         None,
+     resources_in_roles: Optional[list[tuple[str, str]]] = Field(
+         default=None,
          description=(
              "Only include events with specific related resources in specific roles"
          ),
      )
      labels: Optional[ResourceSpecification] = Field(
-         None, description="Only include events for related resources with these labels"
+         default=None,
+         description="Only include events for related resources with these labels",
      )


  class EventAnyResourceFilter(EventDataFilter):
-     id: Optional[List[str]] = Field(
-         None, description="Only include events for resources with these IDs"
+     id: Optional[list[str]] = Field(
+         default=None, description="Only include events for resources with these IDs"
      )
-     id_prefix: Optional[List[str]] = Field(
-         None,
+     id_prefix: Optional[list[str]] = Field(
+         default=None,
          description=(
              "Only include events for resources with IDs starting with these prefixes"
          ),
      )
      labels: Optional[ResourceSpecification] = Field(
-         None, description="Only include events for related resources with these labels"
+         default=None,
+         description="Only include events for related resources with these labels",
      )

      def includes(self, event: Event) -> bool:
@@ -202,8 +202,8 @@ class EventAnyResourceFilter(EventDataFilter):


  class EventIDFilter(EventDataFilter):
-     id: Optional[List[UUID]] = Field(
-         None, description="Only include events with one of these IDs"
+     id: Optional[list[UUID]] = Field(
+         default=None, description="Only include events with one of these IDs"
      )

      def includes(self, event: Event) -> bool:
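
Note: the changes above modernize this module in two ways: `typing.List`/`typing.Tuple` give way to the builtin generics `list`/`tuple` (PEP 585), and default timestamps are built directly from Prefect's `DateTime` type instead of `pendulum.now(...)` wrapped in `cast(...)`. A minimal usage sketch of the updated filters (all values are illustrative only):

    from prefect.events.filters import (
        EventNameFilter,
        EventOccurredFilter,
        EventResourceFilter,
    )

    # The annotations are now list[str] rather than List[str]; construction
    # and validation behave the same as before.
    name_filter = EventNameFilter(prefix=["prefect.flow-run."])

    # Defaults now come from DateTime.now("UTC"): `since` is midnight (UTC)
    # 180 days ago and `until` is the current moment.
    occurred = EventOccurredFilter()

    resource_filter = EventResourceFilter(
        id_prefix=["prefect.flow-run."],
        distinct=True,  # keyword defaults like default=False are now explicit
    )
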
prefect/flow_engine.py CHANGED
@@ -2,10 +2,13 @@ from __future__ import annotations

  import asyncio
  import logging
+ import multiprocessing
+ import multiprocessing.context
  import os
  import time
  from contextlib import ExitStack, asynccontextmanager, contextmanager, nullcontext
  from dataclasses import dataclass, field
+ from functools import wraps
  from typing import (
      Any,
      AsyncGenerator,
@@ -37,8 +40,12 @@ from prefect.concurrency.v1.context import ConcurrencyContext as ConcurrencyCont
  from prefect.context import (
      AsyncClientContext,
      FlowRunContext,
+     SettingsContext,
      SyncClientContext,
      TagsContext,
+     get_settings_context,
+     hydrated_context,
+     serialize_context,
  )
  from prefect.exceptions import (
      Abort,
61
68
  should_persist_result,
62
69
  )
63
70
  from prefect.settings import PREFECT_DEBUG_MODE
71
+ from prefect.settings.context import get_current_settings
72
+ from prefect.settings.models.root import Settings
64
73
  from prefect.states import (
65
74
  Failed,
66
75
  Pending,
@@ -82,6 +91,7 @@ from prefect.utilities.annotations import NotSet
  from prefect.utilities.asyncutils import run_coro_as_sync
  from prefect.utilities.callables import (
      call_with_parameters,
+     cloudpickle_wrapped_call,
      get_call_parameters,
      parameters_to_args_kwargs,
  )
@@ -137,6 +147,7 @@ class BaseFlowRunEngine(Generic[P, R]):
      flow_run_id: Optional[UUID] = None
      logger: logging.Logger = field(default_factory=lambda: get_logger("engine"))
      wait_for: Optional[Iterable[PrefectFuture[Any]]] = None
+     context: Optional[dict[str, Any]] = None
      # holds the return value from the user code
      _return_value: Union[R, Type[NotSet]] = NotSet
      # holds the exception raised by the user code, if any
@@ -647,65 +658,68 @@ class FlowRunEngine(BaseFlowRunEngine[P, R]):
          """
          Enters a client context and creates a flow run if needed.
          """
-         with SyncClientContext.get_or_create() as client_ctx:
-             self._client = client_ctx.client
-             self._is_started = True
+         with hydrated_context(self.context):
+             with SyncClientContext.get_or_create() as client_ctx:
+                 self._client = client_ctx.client
+                 self._is_started = True

-             if not self.flow_run:
-                 self.flow_run = self.create_flow_run(self.client)
-             else:
-                 # Update the empirical policy to match the flow if it is not set
-                 if self.flow_run.empirical_policy.retry_delay is None:
-                     self.flow_run.empirical_policy.retry_delay = (
-                         self.flow.retry_delay_seconds
-                     )
+                 if not self.flow_run:
+                     self.flow_run = self.create_flow_run(self.client)
+                 else:
+                     # Update the empirical policy to match the flow if it is not set
+                     if self.flow_run.empirical_policy.retry_delay is None:
+                         self.flow_run.empirical_policy.retry_delay = (
+                             self.flow.retry_delay_seconds
+                         )

-                 if self.flow_run.empirical_policy.retries is None:
-                     self.flow_run.empirical_policy.retries = self.flow.retries
+                     if self.flow_run.empirical_policy.retries is None:
+                         self.flow_run.empirical_policy.retries = self.flow.retries

-                 self.client.update_flow_run(
-                     flow_run_id=self.flow_run.id,
-                     flow_version=self.flow.version,
-                     empirical_policy=self.flow_run.empirical_policy,
-                 )
+                     self.client.update_flow_run(
+                         flow_run_id=self.flow_run.id,
+                         flow_version=self.flow.version,
+                         empirical_policy=self.flow_run.empirical_policy,
+                     )

-             self._telemetry.start_span(
-                 run=self.flow_run,
-                 client=self.client,
-                 parameters=self.parameters,
-             )
+                 self._telemetry.start_span(
+                     run=self.flow_run,
+                     client=self.client,
+                     parameters=self.parameters,
+                 )

-             try:
-                 yield self
+                 try:
+                     yield self

-             except TerminationSignal as exc:
-                 self.cancel_all_tasks()
-                 self.handle_crash(exc)
-                 raise
-             except Exception:
-                 # regular exceptions are caught and re-raised to the user
-                 raise
-             except (Abort, Pause):
-                 raise
-             except GeneratorExit:
-                 # Do not capture generator exits as crashes
-                 raise
-             except BaseException as exc:
-                 # BaseExceptions are caught and handled as crashes
-                 self.handle_crash(exc)
-                 raise
-             finally:
-                 # If debugging, use the more complete `repr` than the usual `str` description
-                 display_state = (
-                     repr(self.state) if PREFECT_DEBUG_MODE else str(self.state)
-                 )
-                 self.logger.log(
-                     level=logging.INFO if self.state.is_completed() else logging.ERROR,
-                     msg=f"Finished in state {display_state}",
-                 )
+                 except TerminationSignal as exc:
+                     self.cancel_all_tasks()
+                     self.handle_crash(exc)
+                     raise
+                 except Exception:
+                     # regular exceptions are caught and re-raised to the user
+                     raise
+                 except (Abort, Pause):
+                     raise
+                 except GeneratorExit:
+                     # Do not capture generator exits as crashes
+                     raise
+                 except BaseException as exc:
+                     # BaseExceptions are caught and handled as crashes
+                     self.handle_crash(exc)
+                     raise
+                 finally:
+                     # If debugging, use the more complete `repr` than the usual `str` description
+                     display_state = (
+                         repr(self.state) if PREFECT_DEBUG_MODE else str(self.state)
+                     )
+                     self.logger.log(
+                         level=logging.INFO
+                         if self.state.is_completed()
+                         else logging.ERROR,
+                         msg=f"Finished in state {display_state}",
+                     )

-         self._is_started = False
-         self._client = None
+             self._is_started = False
+             self._client = None

      # --------------------------
      #
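
Note: both engines now wrap their entire client context in `hydrated_context(self.context)`, so a run can restore a context dictionary captured in another process. A rough sketch of the round-trip these hunks enable, based only on the `serialize_context`/`hydrated_context` imports above (not the exact engine internals):

    from prefect.context import hydrated_context, serialize_context

    # Parent process: capture the current Prefect context (settings, tags,
    # any enclosing flow run context) as a plain, picklable dict.
    ctx: dict = serialize_context()

    # Child process, after unpickling: re-enter the captured context before
    # running the flow.
    with hydrated_context(ctx):
        ...  # engine work runs with the parent's context restored

    # The engines pass self.context, which defaults to None; in that case
    # hydrated_context leaves the current context untouched.
    with hydrated_context(None):
        ...
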
@@ -1208,71 +1222,74 @@ class AsyncFlowRunEngine(BaseFlowRunEngine[P, R]):
          """
          Enters a client context and creates a flow run if needed.
          """
-         async with AsyncClientContext.get_or_create() as client_ctx:
-             self._client = client_ctx.client
-             self._is_started = True
+         with hydrated_context(self.context):
+             async with AsyncClientContext.get_or_create() as client_ctx:
+                 self._client = client_ctx.client
+                 self._is_started = True
+
+                 if not self.flow_run:
+                     self.flow_run = await self.create_flow_run(self.client)
+                     flow_run_url = url_for(self.flow_run)
+
+                     if flow_run_url:
+                         self.logger.info(
+                             f"View at {flow_run_url}", extra={"send_to_api": False}
+                         )
+                 else:
+                     # Update the empirical policy to match the flow if it is not set
+                     if self.flow_run.empirical_policy.retry_delay is None:
+                         self.flow_run.empirical_policy.retry_delay = (
+                             self.flow.retry_delay_seconds
+                         )

-             if not self.flow_run:
-                 self.flow_run = await self.create_flow_run(self.client)
-                 flow_run_url = url_for(self.flow_run)
+                     if self.flow_run.empirical_policy.retries is None:
+                         self.flow_run.empirical_policy.retries = self.flow.retries

-                 if flow_run_url:
-                     self.logger.info(
-                         f"View at {flow_run_url}", extra={"send_to_api": False}
-                     )
-             else:
-                 # Update the empirical policy to match the flow if it is not set
-                 if self.flow_run.empirical_policy.retry_delay is None:
-                     self.flow_run.empirical_policy.retry_delay = (
-                         self.flow.retry_delay_seconds
+                     await self.client.update_flow_run(
+                         flow_run_id=self.flow_run.id,
+                         flow_version=self.flow.version,
+                         empirical_policy=self.flow_run.empirical_policy,
                      )

-                 if self.flow_run.empirical_policy.retries is None:
-                     self.flow_run.empirical_policy.retries = self.flow.retries
-
-                 await self.client.update_flow_run(
-                     flow_run_id=self.flow_run.id,
-                     flow_version=self.flow.version,
-                     empirical_policy=self.flow_run.empirical_policy,
+                 await self._telemetry.async_start_span(
+                     run=self.flow_run,
+                     client=self.client,
+                     parameters=self.parameters,
                  )

-             await self._telemetry.async_start_span(
-                 run=self.flow_run,
-                 client=self.client,
-                 parameters=self.parameters,
-             )
-
-             try:
-                 yield self
+                 try:
+                     yield self

-             except TerminationSignal as exc:
-                 self.cancel_all_tasks()
-                 await self.handle_crash(exc)
-                 raise
-             except Exception:
-                 # regular exceptions are caught and re-raised to the user
-                 raise
-             except (Abort, Pause):
-                 raise
-             except GeneratorExit:
-                 # Do not capture generator exits as crashes
-                 raise
-             except BaseException as exc:
-                 # BaseExceptions are caught and handled as crashes
-                 await self.handle_crash(exc)
-                 raise
-             finally:
-                 # If debugging, use the more complete `repr` than the usual `str` description
-                 display_state = (
-                     repr(self.state) if PREFECT_DEBUG_MODE else str(self.state)
-                 )
-                 self.logger.log(
-                     level=logging.INFO if self.state.is_completed() else logging.ERROR,
-                     msg=f"Finished in state {display_state}",
-                 )
+                 except TerminationSignal as exc:
+                     self.cancel_all_tasks()
+                     await self.handle_crash(exc)
+                     raise
+                 except Exception:
+                     # regular exceptions are caught and re-raised to the user
+                     raise
+                 except (Abort, Pause):
+                     raise
+                 except GeneratorExit:
+                     # Do not capture generator exits as crashes
+                     raise
+                 except BaseException as exc:
+                     # BaseExceptions are caught and handled as crashes
+                     await self.handle_crash(exc)
+                     raise
+                 finally:
+                     # If debugging, use the more complete `repr` than the usual `str` description
+                     display_state = (
+                         repr(self.state) if PREFECT_DEBUG_MODE else str(self.state)
+                     )
+                     self.logger.log(
+                         level=logging.INFO
+                         if self.state.is_completed()
+                         else logging.ERROR,
+                         msg=f"Finished in state {display_state}",
+                     )

-         self._is_started = False
-         self._client = None
+             self._is_started = False
+             self._client = None

      # --------------------------
      #
@@ -1330,12 +1347,14 @@ def run_flow_sync(
      parameters: Optional[Dict[str, Any]] = None,
      wait_for: Optional[Iterable[PrefectFuture[Any]]] = None,
      return_type: Literal["state", "result"] = "result",
+     context: Optional[dict[str, Any]] = None,
  ) -> Union[R, State, None]:
      engine = FlowRunEngine[P, R](
          flow=flow,
          parameters=parameters,
          flow_run=flow_run,
          wait_for=wait_for,
+         context=context,
      )

      with engine.start():
@@ -1352,9 +1371,14 @@ async def run_flow_async(
      parameters: Optional[Dict[str, Any]] = None,
      wait_for: Optional[Iterable[PrefectFuture[Any]]] = None,
      return_type: Literal["state", "result"] = "result",
+     context: Optional[dict[str, Any]] = None,
  ) -> Union[R, State, None]:
      engine = AsyncFlowRunEngine[P, R](
-         flow=flow, parameters=parameters, flow_run=flow_run, wait_for=wait_for
+         flow=flow,
+         parameters=parameters,
+         flow_run=flow_run,
+         wait_for=wait_for,
+         context=context,
      )

      async with engine.start():
@@ -1371,12 +1395,17 @@ def run_generator_flow_sync(
      parameters: Optional[Dict[str, Any]] = None,
      wait_for: Optional[Iterable[PrefectFuture[Any]]] = None,
      return_type: Literal["state", "result"] = "result",
+     context: Optional[dict[str, Any]] = None,
  ) -> Generator[R, None, None]:
      if return_type != "result":
          raise ValueError("The return_type for a generator flow must be 'result'")

      engine = FlowRunEngine[P, R](
-         flow=flow, parameters=parameters, flow_run=flow_run, wait_for=wait_for
+         flow=flow,
+         parameters=parameters,
+         flow_run=flow_run,
+         wait_for=wait_for,
+         context=context,
      )

      with engine.start():
@@ -1407,12 +1436,17 @@ async def run_generator_flow_async(
      parameters: Optional[Dict[str, Any]] = None,
      wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
      return_type: Literal["state", "result"] = "result",
+     context: Optional[dict[str, Any]] = None,
  ) -> AsyncGenerator[R, None]:
      if return_type != "result":
          raise ValueError("The return_type for a generator flow must be 'result'")

      engine = AsyncFlowRunEngine[P, R](
-         flow=flow, parameters=parameters, flow_run=flow_run, wait_for=wait_for
+         flow=flow,
+         parameters=parameters,
+         flow_run=flow_run,
+         wait_for=wait_for,
+         context=context,
      )

      async with engine.start():
@@ -1446,8 +1480,23 @@ def run_flow(
      wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
      return_type: Literal["state", "result"] = "result",
      error_logger: Optional[logging.Logger] = None,
- ) -> Union[R, State, None]:
-     ret_val: Union[R, State, None] = None
+     context: Optional[dict[str, Any]] = None,
+ ) -> (
+     R
+     | State
+     | None
+     | Coroutine[Any, Any, R | State | None]
+     | Generator[R, None, None]
+     | AsyncGenerator[R, None]
+ ):
+     ret_val: Union[
+         R,
+         State,
+         None,
+         Coroutine[Any, Any, R | State | None],
+         Generator[R, None, None],
+         AsyncGenerator[R, None],
+     ] = None

      try:
          kwargs: dict[str, Any] = dict(
@@ -1458,6 +1507,7 @@
              ),
              wait_for=wait_for,
              return_type=return_type,
+             context=context,
          )

      if flow.isasync and flow.isgenerator:
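
Note: `run_flow`'s return annotation widens in the hunk above because the same entry point dispatches to the sync, async, and generator engine variants, each of which returns a different shape. A small consumption sketch that mirrors the `iscoroutine` check used by `run_flow_in_subprocess` below (the flow itself is illustrative):

    import asyncio

    from prefect import flow
    from prefect.flow_engine import run_flow


    @flow
    async def my_async_flow() -> str:
        return "done"


    # For an async flow, run_flow returns a coroutine the caller must drive;
    # for a sync flow it returns the result (or State) directly.
    maybe_coro = run_flow(my_async_flow)
    if asyncio.iscoroutine(maybe_coro):
        result = asyncio.run(maybe_coro)
    else:
        result = maybe_coro
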
@@ -1492,3 +1542,113 @@ def _flow_parameters(
      parameters = flow_run.parameters if flow_run else {}
      call_args, call_kwargs = parameters_to_args_kwargs(flow.fn, parameters)
      return get_call_parameters(flow.fn, call_args, call_kwargs)
+
+
+ def run_flow_in_subprocess(
+     flow: "Flow[..., Any]",
+     flow_run: "FlowRun | None" = None,
+     parameters: dict[str, Any] | None = None,
+     wait_for: Iterable[PrefectFuture[Any]] | None = None,
+     context: dict[str, Any] | None = None,
+ ) -> multiprocessing.context.SpawnProcess:
+     """
+     Run a flow in a subprocess.
+
+     Note the result of the flow will only be accessible if the flow is configured to
+     persist its result.
+
+     Args:
+         flow: The flow to run.
+         flow_run: The flow run object containing run metadata.
+         parameters: The parameters to use when invoking the flow.
+         wait_for: The futures to wait for before starting the flow.
+         context: A serialized context to hydrate before running the flow. If not provided,
+             the current context will be used. A serialized context should be provided if
+             this function is called in a separate memory space from the parent run (e.g.
+             in a subprocess or on another machine).
+
+     Returns:
+         A multiprocessing.context.SpawnProcess representing the process that is running the flow.
+     """
+     from prefect.flow_engine import run_flow
+
+     @wraps(run_flow)
+     def run_flow_with_env(
+         *args: Any,
+         env: dict[str, str] | None = None,
+         **kwargs: Any,
+     ):
+         """
+         Wrapper function to update environment variables and settings before running the flow.
+         """
+         engine_logger = logging.getLogger("prefect.engine")
+
+         os.environ.update(env or {})
+         settings_context = get_settings_context()
+         # Create a new settings context with a new settings object to pick up the updated
+         # environment variables
+         with SettingsContext(
+             profile=settings_context.profile,
+             settings=Settings(),
+         ):
+             try:
+                 maybe_coro = run_flow(*args, **kwargs)
+                 if asyncio.iscoroutine(maybe_coro):
+                     # This is running in a brand new process, so there won't be an existing
+                     # event loop.
+                     asyncio.run(maybe_coro)
+             except Abort as abort_signal:
+                 abort_signal: Abort
+                 if flow_run:
+                     msg = f"Execution of flow run '{flow_run.id}' aborted by orchestrator: {abort_signal}"
+                 else:
+                     msg = f"Execution aborted by orchestrator: {abort_signal}"
+                 engine_logger.info(msg)
+                 exit(0)
+             except Pause as pause_signal:
+                 pause_signal: Pause
+                 if flow_run:
+                     msg = f"Execution of flow run '{flow_run.id}' is paused: {pause_signal}"
+                 else:
+                     msg = f"Execution is paused: {pause_signal}"
+                 engine_logger.info(msg)
+                 exit(0)
+             except Exception:
+                 if flow_run:
+                     msg = f"Execution of flow run '{flow_run.id}' exited with unexpected exception"
+                 else:
+                     msg = "Execution exited with unexpected exception"
+                 engine_logger.error(msg, exc_info=True)
+                 exit(1)
+             except BaseException:
+                 if flow_run:
+                     msg = f"Execution of flow run '{flow_run.id}' interrupted by base exception"
+                 else:
+                     msg = "Execution interrupted by base exception"
+                 engine_logger.error(msg, exc_info=True)
+                 # Let the exit code be determined by the base exception type
+                 raise
+
+     ctx = multiprocessing.get_context("spawn")
+
+     context = context or serialize_context()
+
+     process = ctx.Process(
+         target=cloudpickle_wrapped_call(
+             run_flow_with_env,
+             env=get_current_settings().to_environment_variables(exclude_unset=True)
+             | os.environ
+             | {
+                 # TODO: make this a thing we can pass into the engine
+                 "PREFECT__ENABLE_CANCELLATION_AND_CRASHED_HOOKS": "false",
+             },
+             flow=flow,
+             flow_run=flow_run,
+             parameters=parameters,
+             wait_for=wait_for,
+             context=context,
+         ),
+     )
+     process.start()
+
+     return process
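
Note: a minimal usage sketch of the new `run_flow_in_subprocess` (the flow and its result-persistence setting are illustrative). Because the process is created with the "spawn" context, the call should live under an import guard:

    from prefect import flow
    from prefect.flow_engine import run_flow_in_subprocess


    @flow(persist_result=True)  # persist the result so it outlives the subprocess
    def greet(name: str) -> str:
        return f"Hello, {name}!"


    if __name__ == "__main__":
        process = run_flow_in_subprocess(flow=greet, parameters={"name": "Marvin"})
        process.join()  # wait for the spawned process to finish
        print(process.exitcode)  # 0 on success
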