prefect-client 3.0.3__py3-none-any.whl → 3.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
prefect/_internal/retries.py CHANGED
@@ -2,11 +2,9 @@ import asyncio
  from functools import wraps
  from typing import Any, Callable, Tuple, Type
 
- from prefect.logging.loggers import get_logger
+ from prefect._internal._logging import logger
  from prefect.utilities.math import clamped_poisson_interval
 
- logger = get_logger("retries")
-
 
  def exponential_backoff_with_jitter(
      attempt: int, base_delay: float, max_delay: float
prefect/_internal/schemas/validators.py CHANGED
@@ -70,7 +70,7 @@ def validate_schema(schema: dict):
      try:
          if schema is not None:
              # Most closely matches the schemas generated by pydantic
-             jsonschema.Draft4Validator.check_schema(schema)
+             jsonschema.Draft202012Validator.check_schema(schema)
      except jsonschema.SchemaError as exc:
          raise ValueError(
              "The provided schema is not a valid json schema. Schema error:"
prefect/client/cloud.py CHANGED
@@ -17,6 +17,7 @@ from prefect.client.schemas.objects import (
  from prefect.exceptions import ObjectNotFound, PrefectException
  from prefect.settings import (
      PREFECT_API_KEY,
+     PREFECT_API_URL,
      PREFECT_CLOUD_API_URL,
      PREFECT_UNIT_TEST_MODE,
  )
@@ -110,6 +111,14 @@ class CloudClient:
          )
          return workspaces
 
+     async def read_current_workspace(self) -> Workspace:
+         workspaces = await self.read_workspaces()
+         current_api_url = PREFECT_API_URL.value()
+         for workspace in workspaces:
+             if workspace.api_url() == current_api_url.rstrip("/"):
+                 return workspace
+         raise ValueError("Current workspace not found")
+
      async def read_worker_metadata(self) -> Dict[str, Any]:
          response = await self.get(
              f"{self.workspace_base_url}/collections/work_pool_types"
prefect/client/schemas/objects.py CHANGED
@@ -231,7 +231,9 @@ class State(ObjectBaseModel, Generic[R]):
 
          Args:
              raise_on_failure: a boolean specifying whether to raise an exception
-                 if the state is of type `FAILED` and the underlying data is an exception
+                 if the state is of type `FAILED` and the underlying data is an exception. When flow
+                 was run in a different memory space (using `run_deployment`), this will only raise
+                 if `fetch` is `True`.
              fetch: a boolean specifying whether to resolve references to persisted
                  results into data. For synchronous users, this defaults to `True`.
                  For asynchronous users, this defaults to `False` for backwards
@@ -297,6 +299,15 @@ class State(ObjectBaseModel, Generic[R]):
              >>> state = await my_flow(return_state=True)
              >>> await state.result()
              hello
+
+             Get the result with `raise_on_failure` from a flow run in a different memory space
+
+             >>> @flow
+             >>> async def my_flow():
+             >>>     raise ValueError("oh no!")
+             >>> my_flow.deploy("my_deployment/my_flow")
+             >>> flow_run = run_deployment("my_deployment/my_flow")
+             >>> await flow_run.state.result(raise_on_failure=True, fetch=True) # Raises `ValueError("oh no!")`
          """
          from prefect.states import get_state_result
 
prefect/client/subscriptions.py CHANGED
@@ -84,13 +84,13 @@ class Subscription(Generic[S]):
              AssertionError,
              websockets.exceptions.ConnectionClosedError,
          ) as e:
-             if isinstance(e, AssertionError) or e.code == WS_1008_POLICY_VIOLATION:
+             if isinstance(e, AssertionError) or e.rcvd.code == WS_1008_POLICY_VIOLATION:
                  if isinstance(e, AssertionError):
                      reason = e.args[0]
                  elif isinstance(e, websockets.exceptions.ConnectionClosedError):
-                     reason = e.reason
+                     reason = e.rcvd.reason
 
-             if isinstance(e, AssertionError) or e.code == WS_1008_POLICY_VIOLATION:
+             if isinstance(e, AssertionError) or e.rcvd.code == WS_1008_POLICY_VIOLATION:
                  raise Exception(
                      "Unable to authenticate to the subscription. Please "
                      "ensure the provided `PREFECT_API_KEY` you are using is "
prefect/deployments/base.py CHANGED
@@ -20,6 +20,7 @@ import yaml
  from ruamel.yaml import YAML
 
  from prefect.client.schemas.actions import DeploymentScheduleCreate
+ from prefect.client.schemas.objects import ConcurrencyLimitStrategy
  from prefect.client.schemas.schedules import IntervalSchedule
  from prefect.logging import get_logger
  from prefect.settings import PREFECT_DEBUG_MODE
@@ -277,6 +278,17 @@ def _format_deployment_for_saving_to_prefect_file(
 
          deployment["schedules"] = schedules
 
+     if deployment.get("concurrency_limit"):
+         concurrency_limit = deployment["concurrency_limit"]
+         if isinstance(concurrency_limit, dict):
+             if isinstance(
+                 concurrency_limit["collision_strategy"], ConcurrencyLimitStrategy
+             ):
+                 concurrency_limit["collision_strategy"] = str(
+                     concurrency_limit["collision_strategy"].value
+                 )
+         deployment["concurrency_limit"] = concurrency_limit
+
      return deployment
 
 
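The new block normalizes an enum-valued `collision_strategy` into a plain string before the deployment dict is written to `prefect.yaml`. A hedged sketch of the shape involved (the deployment name and enum member are illustrative):

```python
from prefect.client.schemas.objects import ConcurrencyLimitStrategy

deployment = {
    "name": "my-deployment",  # illustrative
    "concurrency_limit": {
        "limit": 5,
        "collision_strategy": ConcurrencyLimitStrategy.ENQUEUE,
    },
}
# After _format_deployment_for_saving_to_prefect_file runs, the nested enum is
# replaced with its string value so the dict round-trips cleanly through YAML:
# {"limit": 5, "collision_strategy": "ENQUEUE"}
```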
prefect/events/clients.py CHANGED
@@ -34,6 +34,7 @@ from prefect.settings import (
      PREFECT_API_KEY,
      PREFECT_API_URL,
      PREFECT_CLOUD_API_URL,
+     PREFECT_DEBUG_MODE,
      PREFECT_SERVER_ALLOW_EPHEMERAL_MODE,
  )
 
@@ -67,6 +68,18 @@ EVENT_WEBSOCKET_CHECKPOINTS = Counter(
  logger = get_logger(__name__)
 
 
+ def http_to_ws(url: str):
+     return url.replace("https://", "wss://").replace("http://", "ws://").rstrip("/")
+
+
+ def events_in_socket_from_api_url(url: str):
+     return http_to_ws(url) + "/events/in"
+
+
+ def events_out_socket_from_api_url(url: str):
+     return http_to_ws(url) + "/events/out"
+
+
  def get_events_client(
      reconnection_attempts: int = 10,
      checkpoint_every: int = 700,
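These helpers are pure string transforms, so their behavior can be checked directly; a small sketch with illustrative URLs:

```python
from prefect.events.clients import (
    events_in_socket_from_api_url,
    events_out_socket_from_api_url,
    http_to_ws,
)

assert http_to_ws("https://api.prefect.cloud/api/") == "wss://api.prefect.cloud/api"
assert events_in_socket_from_api_url("http://127.0.0.1:4200/api") == (
    "ws://127.0.0.1:4200/api/events/in"
)
assert events_out_socket_from_api_url("http://127.0.0.1:4200/api") == (
    "ws://127.0.0.1:4200/api/events/out"
)
```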
@@ -251,12 +264,7 @@ class PrefectEventsClient(EventsClient):
                  "api_url must be provided or set in the Prefect configuration"
              )
 
-         self._events_socket_url = (
-             api_url.replace("https://", "wss://")
-             .replace("http://", "ws://")
-             .rstrip("/")
-             + "/events/in"
-         )
+         self._events_socket_url = events_in_socket_from_api_url(api_url)
          self._connect = connect(self._events_socket_url)
          self._websocket = None
          self._reconnection_attempts = reconnection_attempts
@@ -285,11 +293,26 @@ class PrefectEventsClient(EventsClient):
              self._websocket = None
              await self._connect.__aexit__(None, None, None)
 
-         self._websocket = await self._connect.__aenter__()
-
-         # make sure we have actually connected
-         pong = await self._websocket.ping()
-         await pong
+         try:
+             self._websocket = await self._connect.__aenter__()
+             # make sure we have actually connected
+             pong = await self._websocket.ping()
+             await pong
+         except Exception as e:
+             # The client is frequently run in a background thread
+             # so we log an additional warning to ensure
+             # surfacing the error to the user.
+             logger.warning(
+                 "Unable to connect to %r. "
+                 "Please check your network settings to ensure websocket connections "
+                 "to the API are allowed. Otherwise event data (including task run data) may be lost. "
+                 "Reason: %s. "
+                 "Set PREFECT_DEBUG_MODE=1 to see the full error.",
+                 self._events_socket_url,
+                 str(e),
+                 exc_info=PREFECT_DEBUG_MODE,
+             )
+             raise
 
          events_to_resend = self._unconfirmed_events
          # Clear the unconfirmed events here, because they are going back through emit
@@ -412,7 +435,6 @@ class PrefectCloudEventsClient(PrefectEventsClient):
              reconnection_attempts=reconnection_attempts,
              checkpoint_every=checkpoint_every,
          )
-
          self._connect = connect(
              self._events_socket_url,
              extra_headers={"Authorization": f"bearer {api_key}"},
@@ -468,11 +490,7 @@ class PrefectEventSubscriber:
          self._filter = filter or EventFilter()  # type: ignore[call-arg]
          self._seen_events = TTLCache(maxsize=SEEN_EVENTS_SIZE, ttl=SEEN_EVENTS_TTL)
 
-         socket_url = (
-             api_url.replace("https://", "wss://")
-             .replace("http://", "ws://")
-             .rstrip("/")
-         ) + "/events/out"
+         socket_url = events_out_socket_from_api_url(api_url)
 
          logger.debug("Connecting to %s", socket_url)
 
@@ -527,11 +545,11 @@ class PrefectEventSubscriber:
                  f"Reason: {e.args[0]}"
              )
          except ConnectionClosedError as e:
-             raise Exception(
-                 "Unable to authenticate to the event stream. Please ensure the "
-                 "provided api_key you are using is valid for this environment. "
-                 f"Reason: {e.reason}"
-             ) from e
+             reason = getattr(e.rcvd, "reason", None)
+             msg = "Unable to authenticate to the event stream. Please ensure the "
+             msg += "provided api_key you are using is valid for this environment. "
+             msg += f"Reason: {reason}" if reason else ""
+             raise Exception(msg) from e
 
          from prefect.events.filters import EventOccurredFilter
 
prefect/filesystems.py CHANGED
@@ -5,7 +5,7 @@ from typing import Any, Dict, Optional
 
  import anyio
  import fsspec
- from pydantic import Field, SecretStr, field_validator
+ from pydantic import BaseModel, Field, SecretStr, field_validator
 
  from prefect._internal.schemas.validators import (
      stringify_path,
@@ -519,4 +519,29 @@ class SMB(WritableFileSystem, WritableDeploymentStorage):
          return await self.filesystem.write_path(path=path, content=content)
 
 
+ class NullFileSystem(BaseModel):
+     """
+     A file system that does not store any data.
+     """
+
+     async def read_path(self, path: str) -> None:
+         pass
+
+     async def write_path(self, path: str, content: bytes) -> None:
+         pass
+
+     async def get_directory(
+         self, from_path: Optional[str] = None, local_path: Optional[str] = None
+     ) -> None:
+         pass
+
+     async def put_directory(
+         self,
+         local_path: Optional[str] = None,
+         to_path: Optional[str] = None,
+         ignore_file: Optional[str] = None,
+     ) -> None:
+         pass
+
+
  __getattr__ = getattr_migration(__name__)
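`NullFileSystem` gives the result machinery a stand-in that satisfies the storage interface while persisting nothing. A minimal sketch of the (non-)behavior, assuming the import path shown above:

```python
import asyncio

from prefect.filesystems import NullFileSystem


async def demo() -> None:
    fs = NullFileSystem()
    await fs.write_path("some/key", b"ignored")      # silently discarded
    assert await fs.read_path("some/key") is None    # nothing was ever stored
    assert await fs.get_directory(from_path=".", local_path=".") is None


asyncio.run(demo())
```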
prefect/flows.py CHANGED
@@ -535,7 +535,7 @@ class Flow(Generic[P, R]):
 
          def resolve_block_reference(data: Any) -> Any:
              if isinstance(data, dict) and "$ref" in data:
-                 return Block.load_from_ref(data["$ref"])
+                 return Block.load_from_ref(data["$ref"], _sync=True)
              return data
 
          try:
@@ -1256,6 +1256,15 @@ class Flow(Generic[P, R]):
      ) -> T:
          ...
 
+     @overload
+     def __call__(
+         self: "Flow[P, Coroutine[Any, Any, T]]",
+         *args: P.args,
+         return_state: Literal[True],
+         **kwargs: P.kwargs,
+     ) -> Awaitable[State[T]]:
+         ...
+
      @overload
      def __call__(
          self: "Flow[P, T]",
prefect/results.py CHANGED
@@ -8,6 +8,7 @@ from functools import partial
  from pathlib import Path
  from typing import (
      TYPE_CHECKING,
+     Annotated,
      Any,
      Callable,
      Dict,
@@ -25,8 +26,10 @@ from cachetools import LRUCache
  from pydantic import (
      BaseModel,
      ConfigDict,
+     Discriminator,
      Field,
      PrivateAttr,
+     Tag,
      ValidationError,
      model_serializer,
      model_validator,
@@ -47,6 +50,7 @@ from prefect.exceptions import (
  )
  from prefect.filesystems import (
      LocalFileSystem,
+     NullFileSystem,
      WritableFileSystem,
  )
  from prefect.locking.protocol import LockManager
@@ -218,6 +222,19 @@ def _format_user_supplied_storage_key(key: str) -> str:
  T = TypeVar("T")
 
 
+ def result_storage_discriminator(x: Any) -> str:
+     if isinstance(x, dict):
+         if "block_type_slug" in x:
+             return "WritableFileSystem"
+         else:
+             return "NullFileSystem"
+     if isinstance(x, WritableFileSystem):
+         return "WritableFileSystem"
+     if isinstance(x, NullFileSystem):
+         return "NullFileSystem"
+     return "None"
+
+
  @deprecated_field(
      "persist_result",
      when=lambda x: x is not None,
@@ -245,7 +262,14 @@ class ResultStore(BaseModel):
      model_config = ConfigDict(arbitrary_types_allowed=True)
 
      result_storage: Optional[WritableFileSystem] = Field(default=None)
-     metadata_storage: Optional[WritableFileSystem] = Field(default=None)
+     metadata_storage: Annotated[
+         Union[
+             Annotated[WritableFileSystem, Tag("WritableFileSystem")],
+             Annotated[NullFileSystem, Tag("NullFileSystem")],
+             Annotated[None, Tag("None")],
+         ],
+         Discriminator(result_storage_discriminator),
+     ] = Field(default=None)
      lock_manager: Optional[LockManager] = Field(default=None)
      cache_result_in_memory: bool = Field(default=True)
      serializer: Serializer = Field(default_factory=get_default_result_serializer)
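The callable discriminator lets pydantic route raw dicts (serialized blocks carry a `block_type_slug` key), already-instantiated filesystems, and `None` to the right member of the union. The dispatch follows directly from `result_storage_discriminator` above:

```python
from prefect.results import result_storage_discriminator

result_storage_discriminator({"block_type_slug": "local-file-system"})  # -> "WritableFileSystem"
result_storage_discriminator({})                                        # -> "NullFileSystem"
result_storage_discriminator(None)                                      # -> "None"
```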
@@ -281,6 +305,7 @@ class ResultStore(BaseModel):
              update["cache_result_in_memory"] = flow.cache_result_in_memory
          if self.result_storage is None and update.get("result_storage") is None:
              update["result_storage"] = await get_default_result_storage()
+             update["metadata_storage"] = NullFileSystem()
          return self.model_copy(update=update)
 
      @sync_compatible
@@ -294,6 +319,8 @@ class ResultStore(BaseModel):
          Returns:
              An updated result store.
          """
+         from prefect.transactions import get_transaction
+
          update = {}
          if task.result_storage is not None:
              update["result_storage"] = await resolve_result_storage(task.result_storage)
@@ -305,17 +332,30 @@ class ResultStore(BaseModel):
              update["storage_key_fn"] = partial(
                  _format_user_supplied_storage_key, task.result_storage_key
              )
+
+         # use the lock manager from a parent transaction if it exists
+         if (current_txn := get_transaction()) and isinstance(
+             current_txn.store, ResultStore
+         ):
+             update["lock_manager"] = current_txn.store.lock_manager
+
          if task.cache_policy is not None and task.cache_policy is not NotSet:
              if task.cache_policy.key_storage is not None:
                  storage = task.cache_policy.key_storage
                  if isinstance(storage, str) and not len(storage.split("/")) == 2:
                      storage = Path(storage)
                  update["metadata_storage"] = await resolve_result_storage(storage)
+             # if the cache policy has a lock manager, it takes precedence over the parent transaction
              if task.cache_policy.lock_manager is not None:
                  update["lock_manager"] = task.cache_policy.lock_manager
 
          if self.result_storage is None and update.get("result_storage") is None:
              update["result_storage"] = await get_default_result_storage()
+         if (
+             isinstance(self.metadata_storage, NullFileSystem)
+             and update.get("metadata_storage", NotSet) is NotSet
+         ):
+             update["metadata_storage"] = None
          return self.model_copy(update=update)
 
      @staticmethod
@@ -433,7 +473,9 @@ class ResultStore(BaseModel):
              )
          else:
              content = await self.result_storage.read_path(key)
-             result_record = ResultRecord.deserialize(content)
+             result_record = ResultRecord.deserialize(
+                 content, backup_serializer=self.serializer
+             )
 
          if self.cache_result_in_memory:
              if self.result_storage_block_id is None and hasattr(
@@ -446,26 +488,36 @@ class ResultStore(BaseModel):
                  self.cache[cache_key] = result_record
          return result_record
 
-     def read(self, key: str, holder: Optional[str] = None) -> "ResultRecord":
+     def read(
+         self,
+         key: str,
+         holder: Optional[str] = None,
+     ) -> "ResultRecord":
          """
          Read a result record from storage.
 
          Args:
              key: The key to read the result record from.
              holder: The holder of the lock if a lock was set on the record.
+
          Returns:
              A result record.
          """
          holder = holder or self.generate_default_holder()
          return self._read(key=key, holder=holder, _sync=True)
 
-     async def aread(self, key: str, holder: Optional[str] = None) -> "ResultRecord":
+     async def aread(
+         self,
+         key: str,
+         holder: Optional[str] = None,
+     ) -> "ResultRecord":
          """
          Read a result record from storage.
 
          Args:
              key: The key to read the result record from.
              holder: The holder of the lock if a lock was set on the record.
+
          Returns:
              A result record.
          """
@@ -1026,17 +1078,31 @@ class ResultRecord(BaseModel, Generic[R]):
          )
 
      @classmethod
-     def deserialize(cls, data: bytes) -> "ResultRecord[R]":
+     def deserialize(
+         cls, data: bytes, backup_serializer: Optional[Serializer] = None
+     ) -> "ResultRecord[R]":
          """
          Deserialize a record from bytes.
 
          Args:
              data: the serialized record
+             backup_serializer: The serializer to use to deserialize the result record. Only
+                 necessary if the provided data does not specify a serializer.
 
          Returns:
              ResultRecord: the deserialized record
          """
-         instance = cls.model_validate_json(data)
+         try:
+             instance = cls.model_validate_json(data)
+         except ValidationError:
+             if backup_serializer is None:
+                 raise
+             else:
+                 result = backup_serializer.loads(data)
+                 return cls(
+                     metadata=ResultRecordMetadata(serializer=backup_serializer),
+                     result=result,
+                 )
          if isinstance(instance.result, bytes):
              instance.result = instance.serializer.loads(instance.result)
          elif isinstance(instance.result, str):
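The `backup_serializer` path is taken when the stored bytes are not a JSON-encoded `ResultRecord`. A hedged sketch, with `PickleSerializer` chosen only for illustration:

```python
from prefect.results import ResultRecord
from prefect.serializers import PickleSerializer

serializer = PickleSerializer()
raw = serializer.dumps({"answer": 42})  # a raw payload, not a serialized ResultRecord

# Validation of `raw` as a ResultRecord fails, so the backup serializer decodes it.
record = ResultRecord.deserialize(raw, backup_serializer=serializer)
assert record.result == {"answer": 42}
```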
prefect/runner/runner.py CHANGED
@@ -74,7 +74,6 @@ from prefect.client.schemas.objects import Flow as APIFlow
  from prefect.concurrency.asyncio import (
      AcquireConcurrencySlotTimeoutError,
      ConcurrencySlotAcquisitionError,
-     concurrency,
  )
  from prefect.events import DeploymentTriggerTypes, TriggerTypes
  from prefect.events.related import tags_as_related_resources
@@ -92,7 +91,6 @@ from prefect.settings import (
      get_current_settings,
  )
  from prefect.states import (
-     AwaitingConcurrencySlot,
      Crashed,
      Pending,
      exception_to_failed_state,
@@ -1047,22 +1045,12 @@ class Runner:
      ) -> Union[Optional[int], Exception]:
          run_logger = self._get_flow_run_logger(flow_run)
 
-         if flow_run.deployment_id:
-             deployment = await self._client.read_deployment(flow_run.deployment_id)
-             if deployment and deployment.global_concurrency_limit:
-                 limit_name = deployment.global_concurrency_limit.name
-                 concurrency_ctx = concurrency
-             else:
-                 limit_name = ""
-                 concurrency_ctx = asyncnullcontext
-
          try:
-             async with concurrency_ctx(limit_name, max_retries=0, strict=True):
-                 status_code = await self._run_process(
-                     flow_run=flow_run,
-                     task_status=task_status,
-                     entrypoint=entrypoint,
-                 )
+             status_code = await self._run_process(
+                 flow_run=flow_run,
+                 task_status=task_status,
+                 entrypoint=entrypoint,
+             )
          except (
              AcquireConcurrencySlotTimeoutError,
              ConcurrencySlotAcquisitionError,
@@ -1164,26 +1152,6 @@ class Runner:
                  exc_info=True,
              )
 
-     async def _propose_scheduled_state(self, flow_run: "FlowRun") -> None:
-         run_logger = self._get_flow_run_logger(flow_run)
-         try:
-             state = await propose_state(
-                 self._client,
-                 AwaitingConcurrencySlot(),
-                 flow_run_id=flow_run.id,
-             )
-             self._logger.info(f"Flow run {flow_run.id} now has state {state.name}")
-         except Abort as exc:
-             run_logger.info(
-                 (
-                     f"Aborted rescheduling of flow run '{flow_run.id}'. "
-                     f"Server sent an abort signal: {exc}"
-                 ),
-             )
-             pass
-         except Exception:
-             run_logger.exception(f"Failed to update state of flow run '{flow_run.id}'")
-
      async def _propose_crashed_state(self, flow_run: "FlowRun", message: str) -> None:
          run_logger = self._get_flow_run_logger(flow_run)
          try:
prefect/settings.py CHANGED
@@ -1458,6 +1458,12 @@ PREFECT_DEPLOYMENT_SCHEDULE_MAX_SCHEDULED_RUNS = Setting(int, default=50)
  The maximum number of scheduled runs to create for a deployment.
  """
 
+ PREFECT_DEPLOYMENT_CONCURRENCY_SLOT_WAIT_SECONDS = Setting(float, default=30.0)
+ """
+ The number of seconds to wait before retrying when a deployment flow run
+ cannot secure a concurrency slot from the server.
+ """
+
  PREFECT_WORKER_HEARTBEAT_SECONDS = Setting(float, default=30)
  """
  Number of seconds a worker should wait between sending a heartbeat.
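Like other settings, the new value can be read at runtime; a minimal sketch (30.0 is the default shown above):

```python
from prefect.settings import PREFECT_DEPLOYMENT_CONCURRENCY_SLOT_WAIT_SECONDS

wait_seconds = PREFECT_DEPLOYMENT_CONCURRENCY_SLOT_WAIT_SECONDS.value()  # 30.0 unless overridden
```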
prefect/transactions.py CHANGED
@@ -31,7 +31,6 @@ from prefect.results import (
      ResultRecord,
      ResultStore,
      get_result_store,
-     should_persist_result,
  )
  from prefect.utilities.annotations import NotSet
  from prefect.utilities.collections import AutoEnum
@@ -438,7 +437,7 @@ def transaction(
      commit_mode: Optional[CommitMode] = None,
      isolation_level: Optional[IsolationLevel] = None,
      overwrite: bool = False,
-     write_on_commit: Optional[bool] = None,
+     write_on_commit: bool = True,
      logger: Union[logging.Logger, logging.LoggerAdapter, None] = None,
  ) -> Generator[Transaction, None, None]:
      """
@@ -473,9 +472,7 @@ def transaction(
          commit_mode=commit_mode,
          isolation_level=isolation_level,
          overwrite=overwrite,
-         write_on_commit=write_on_commit
-         if write_on_commit is not None
-         else should_persist_result(),
+         write_on_commit=write_on_commit,
          logger=logger,
      ) as txn:
          yield txn
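With the default flipped to `True`, staged values are persisted on commit unless a caller opts out explicitly. A hedged usage sketch (the transaction keys are illustrative, and a default result store is assumed to be available):

```python
from prefect.transactions import transaction

with transaction(key="tracked-side-effect") as txn:
    txn.stage({"status": "done"})        # written to the result store on commit

with transaction(key="dry-run", write_on_commit=False) as txn:
    txn.stage({"status": "skipped"})     # staged, but never persisted
```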
prefect/workers/base.py CHANGED
@@ -19,11 +19,6 @@ from prefect.client.orchestration import PrefectClient, get_client
  from prefect.client.schemas.actions import WorkPoolCreate, WorkPoolUpdate
  from prefect.client.schemas.objects import StateType, WorkPool
  from prefect.client.utilities import inject_client
- from prefect.concurrency.asyncio import (
-     AcquireConcurrencySlotTimeoutError,
-     ConcurrencySlotAcquisitionError,
-     concurrency,
- )
  from prefect.events import Event, RelatedResource, emit_event
  from prefect.events.related import object_as_related_resource, tags_as_related_resources
  from prefect.exceptions import (
@@ -41,12 +36,10 @@ from prefect.settings import (
      get_current_settings,
  )
  from prefect.states import (
-     AwaitingConcurrencySlot,
      Crashed,
      Pending,
      exception_to_failed_state,
  )
- from prefect.utilities.asyncutils import asyncnullcontext
  from prefect.utilities.dispatch import get_registry_for_type, register_base_type
  from prefect.utilities.engine import propose_state
  from prefect.utilities.services import critical_service_loop
@@ -221,7 +214,7 @@ class BaseJobConfiguration(BaseModel):
          env = {
              **self._base_environment(),
              **self._base_flow_run_environment(flow_run),
-             **self.env,
+             **(self.env if isinstance(self.env, dict) else {}),
          }
          self.env = {key: value for key, value in env.items() if value is not None}
          self.labels = {
@@ -865,42 +858,15 @@ class BaseWorker(abc.ABC):
          self, flow_run: "FlowRun", task_status: Optional[anyio.abc.TaskStatus] = None
      ) -> Union[BaseWorkerResult, Exception]:
          run_logger = self.get_flow_run_logger(flow_run)
-         deployment = None
-
-         if flow_run.deployment_id:
-             deployment = await self._client.read_deployment(flow_run.deployment_id)
-         if deployment and deployment.global_concurrency_limit:
-             limit_name = deployment.global_concurrency_limit.name
-             concurrency_ctx = concurrency
-         else:
-             limit_name = ""
-             concurrency_ctx = asyncnullcontext
 
          try:
-             async with concurrency_ctx(limit_name, max_retries=0, strict=True):
-                 configuration = await self._get_configuration(flow_run, deployment)
-                 submitted_event = self._emit_flow_run_submitted_event(configuration)
-                 result = await self.run(
-                     flow_run=flow_run,
-                     task_status=task_status,
-                     configuration=configuration,
-                 )
-         except (
-             AcquireConcurrencySlotTimeoutError,
-             ConcurrencySlotAcquisitionError,
-         ) as exc:
-             self._logger.info(
-                 (
-                     "Deployment %s has reached its concurrency limit when submitting flow run %s"
-                 ),
-                 flow_run.deployment_id,
-                 flow_run.name,
+             configuration = await self._get_configuration(flow_run)
+             submitted_event = self._emit_flow_run_submitted_event(configuration)
+             result = await self.run(
+                 flow_run=flow_run,
+                 task_status=task_status,
+                 configuration=configuration,
              )
-             await self._propose_scheduled_state(flow_run)
-
-             if not task_status._future.done():
-                 task_status.started(exc)
-             return exc
          except Exception as exc:
              if not task_status._future.done():
                  # This flow run was being submitted and did not start successfully
@@ -1026,21 +992,6 @@ class BaseWorker(abc.ABC):
 
          return True
 
-     async def _propose_scheduled_state(self, flow_run: "FlowRun") -> None:
-         run_logger = self.get_flow_run_logger(flow_run)
-         try:
-             state = await propose_state(
-                 self._client,
-                 AwaitingConcurrencySlot(),
-                 flow_run_id=flow_run.id,
-             )
-             self._logger.info(f"Flow run {flow_run.id} now has state {state.name}")
-         except Abort:
-             # Flow run already marked as failed
-             pass
-         except Exception:
-             run_logger.exception(f"Failed to update state of flow run '{flow_run.id}'")
-
      async def _propose_failed_state(self, flow_run: "FlowRun", exc: Exception) -> None:
          run_logger = self.get_flow_run_logger(flow_run)
          try:
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: prefect-client
- Version: 3.0.3
+ Version: 3.0.4
  Summary: Workflow orchestration and management.
  Home-page: https://www.prefect.io
  Author: Prefect Technologies, Inc.
@@ -8,31 +8,31 @@ prefect/cache_policies.py,sha256=thYNj0CcJjM4TJQvXsLKTIQl7t0qjEnSWzxPWPONcRw,911
  prefect/context.py,sha256=J4GS70ZG_dkJ2v_dQWkdbuiN88iumFpoJhTu3hg7d60,21821
  prefect/engine.py,sha256=BpmDbe6miZcTl1vRkxfCPYcWSXADLigGPCagFwucMz0,1976
  prefect/exceptions.py,sha256=ondjUe0fIXXjhoFnqg8twqgLoPMR02HuQv5Az-kSG50,11348
- prefect/filesystems.py,sha256=7tqufyXIfEnMs2VE-hov3tJfBiELMhU9Dn9snmDh4B8,17304
+ prefect/filesystems.py,sha256=CxwMmKY8LBUed_9IqE2jUqxVCWhXa1r2fjKgLbIC2Vg,17893
  prefect/flow_engine.py,sha256=Z6xOO1ONAGwVNcvyvEIkJv_LB0VE5iBptV4ZWgTFqbc,30000
  prefect/flow_runs.py,sha256=EaXRIQTOnwnA0fO7_EjwafFRmS57K_CRy0Xsz3JDIhc,16070
- prefect/flows.py,sha256=1NisFNzfK2owGjNdXeYWuJBTqHx7AXIeWFF_t6I1rr8,89364
+ prefect/flows.py,sha256=zgTnzasA8a1PiOGRRSUB1P2kIz5kNgaKMkPEI81TXcQ,89588
  prefect/futures.py,sha256=1Uq-Q3ommCHSku_lsASuP1s3yFuYoL980fGcHdCFg30,16298
  prefect/main.py,sha256=IdtnJR5-IwP8EZsfhMFKj92ylMhNyau9X_eMcTP2ZjM,2336
  prefect/plugins.py,sha256=HY7Z7OJlltqzsUiPMEL1Y_hQbHw0CeZKayWiK-k8DP4,2435
  prefect/profiles.toml,sha256=kTvqDNMzjH3fsm5OEI-NKY4dMmipor5EvQXRB6rPEjY,522
  prefect/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- prefect/results.py,sha256=Rq5WQtAvGfvSdOm604LTSEG7PAO3VGl85JTCHLyIqE0,45327
+ prefect/results.py,sha256=dHaS_sHHi9CcHk5DmwONey2OQnVJkj7cLqoYmHaUB58,47377
  prefect/serializers.py,sha256=Lo41EM0_qGzcfB_63390Izeo3DdK6cY6VZfxa9hpSGQ,8712
- prefect/settings.py,sha256=LCZEVO0cPzlDG7bR4cbUHVr_J715cRLZ87Pn22FgQcM,73286
+ prefect/settings.py,sha256=9T_JY0VIpnFly_BtDDihPT9ppukUQrOdUkqwEtJQa60,73501
  prefect/states.py,sha256=2lysq6X5AvqPfE3eD3D0HYt-KpFA2OUgA0c4ZQ22A_U,24906
  prefect/task_engine.py,sha256=rcCPPrX01CxiOPhnf_7WcN0wGHbmB5VV7_OG7PKYOrY,57943
  prefect/task_runners.py,sha256=W1n0yMwbDIqnvffFVJADo9MGEbLaYkzWk52rqgnkMY4,15019
  prefect/task_runs.py,sha256=jkaQOkRKOHS8fgHUijteriFpjMSKv4zldn1D8tZHkUI,8777
  prefect/task_worker.py,sha256=a8Uw78Ms4p3ikt_la50lENmPLIa-jjbuvunvjVXvRKQ,16785
  prefect/tasks.py,sha256=35eOv7VfhziiC3hL9FxB3spYtG6tpxZBLzk5KP_8Ux8,68371
- prefect/transactions.py,sha256=XnP6Jz7uXIyU3mV1QVWii_PdnnsxdJLV238MOCtYoFw,16500
+ prefect/transactions.py,sha256=oJKP4w5KjV1PSmN-ByyHN3bagSiKiTKvMVtLvAAfpAg,16387
  prefect/variables.py,sha256=023cfSj_ydwvz6lyChRKnjHFfkdoYZKK_zdTtuSxrYo,4665
  prefect/_internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/_internal/_logging.py,sha256=HvNHY-8P469o5u4LYEDBTem69XZEt1QUeUaLToijpak,810
  prefect/_internal/integrations.py,sha256=U4cZMDbnilzZSKaMxvzZcSL27a1tzRMjDoTfr2ul_eY,231
  prefect/_internal/pytz.py,sha256=WWl9x16rKFWequGmcOGs_ljpCDPf2LDHMyZp_4D8e6c,13748
- prefect/_internal/retries.py,sha256=8uuagUX32w5YANLHqjM_1hHmVe9b1HxcwuPMXb1G2Qk,2317
+ prefect/_internal/retries.py,sha256=xtgj6oPSvYQLbyk451LR6swcRQvRVWEzCxY6GMK7qA4,2284
  prefect/_internal/compatibility/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/_internal/compatibility/deprecated.py,sha256=PVME2C3Oe4_8tKIGufx1W4EpGkz5IQY8gFohPVOjNcM,7533
  prefect/_internal/compatibility/experimental.py,sha256=nrIeeAe1vZ0yMb1cPw5AroVR6_msx-bzTeBLzY4au6o,5634
@@ -58,7 +58,7 @@ prefect/_internal/schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJW
  prefect/_internal/schemas/bases.py,sha256=L8Lm93Cjfxv6QNu-RXjg59wm6oy97aGRb4niXiha2n4,4124
  prefect/_internal/schemas/fields.py,sha256=m4LrFNz8rA9uBhMk9VyQT6FIXmV_EVAW92hdXeSvHbY,837
  prefect/_internal/schemas/serializers.py,sha256=G_RGHfObjisUiRvd29p-zc6W4bwt5rE1OdR6TXNrRhQ,825
- prefect/_internal/schemas/validators.py,sha256=Y8bHb3EsLJTiHsffg_TPbknj0Nmln8vd6qySLFbfGzY,26546
+ prefect/_internal/schemas/validators.py,sha256=L_NyWhmO76DRTprxXla_FAo3QoGfdbM7uQsVe3gKp4g,26551
  prefect/blocks/__init__.py,sha256=BUfh6gIwA6HEjRyVCAiv0he3M1zfM-oY-JrlBfeWeY8,182
  prefect/blocks/abstract.py,sha256=YLzCaf3yXv6wFCF5ZqCIHJNwH7fME1rLxC-SijARHzk,16319
  prefect/blocks/core.py,sha256=l_56oggt9uJOABHus-NCXLQ4akeY4kzyDUO37ZyosX0,52783
@@ -69,16 +69,16 @@ prefect/blocks/system.py,sha256=OacB-LLXaNiLY49bPx7aAjmvdEdBxNoaOdzsCUcDr2c,4563
  prefect/blocks/webhook.py,sha256=F0u1WSO17Gda8qwr9gYaA84Nfc8Qkic6HhhJMYXRzug,2496
  prefect/client/__init__.py,sha256=fFtCXsGIsBCsAMFKlUPgRVUoIeqq_CsGtFE1knhbHlU,593
  prefect/client/base.py,sha256=2K8UiWzorZNNM4c8c-OiGeZ5i5ViUfZ_Q31oPobbOO0,24956
- prefect/client/cloud.py,sha256=SOqPXvXmFxAatubTyRQQe9i3DkAf4-mZZIpSO3Oy-hA,5819
+ prefect/client/cloud.py,sha256=Wjm27jUG1K8UHb8sIamOqyAGlM26Oe9_OFpCO6x5s2E,6191
  prefect/client/collections.py,sha256=u-96saqu0RALAazRI0YaZCJahnuafMppY21KN6ggx80,1059
  prefect/client/constants.py,sha256=Z_GG8KF70vbbXxpJuqW5pLnwzujTVeHbcYYRikNmGH0,29
  prefect/client/orchestration.py,sha256=XImn-8TKOYJ8LBAZ83FEC4DOf0RP6WE9BeLpDXfYX4A,149371
- prefect/client/subscriptions.py,sha256=J9uK9NGHO4VX4Y3NGgBJ4pIG_0cf-dJWPhF3f3PGYL4,3388
+ prefect/client/subscriptions.py,sha256=oqF2MJsgN3psJg-MePfvwMtEWjromfP9StWF00xc1eg,3403
  prefect/client/utilities.py,sha256=89fmza0cRMOayxgXRdO51TKb11TczJ0ByOZmcZVrt44,3286
  prefect/client/schemas/__init__.py,sha256=KlyqFV-hMulMkNstBn_0ijoHoIwJZaBj6B1r07UmgvE,607
  prefect/client/schemas/actions.py,sha256=GT1VlvwV5koV690H7ViGFH3tpW7_PvDf0QJoYTcOLDg,28862
  prefect/client/schemas/filters.py,sha256=oYUBj59SC6keYHaQ8-qFaVynEAcHV8BABrQaob2mI6c,35864
- prefect/client/schemas/objects.py,sha256=UFdNqcHknHstXoVBlu-pP78fxBD1YmJyh1VOfYBJPrk,55564
+ prefect/client/schemas/objects.py,sha256=zJGTbmFYiAORxJ3PB4EcRkOIVcQlxH4O91aOtjKBkyU,56149
  prefect/client/schemas/responses.py,sha256=tV06W8npA8oCjV9d0ZNvjro4QcbHxayb8PC4LmanXjo,15467
  prefect/client/schemas/schedules.py,sha256=8rpqjOYtknu2-1n5_WD4cOplgu93P3mCyX86B22LfL4,13070
  prefect/client/schemas/sorting.py,sha256=L-2Mx-igZPtsUoRUguTcG3nIEstMEMPD97NwPM2Ox5s,2579
@@ -97,7 +97,7 @@ prefect/concurrency/v1/events.py,sha256=PhW3iV5z-ez97LBHnte4joHMVPYaZJNRJkNXsZlb
  prefect/concurrency/v1/services.py,sha256=5IwRepJ4IMC0y-PmqXiDr5rR4wl3BuHbP6Tg6C3rrQg,4426
  prefect/concurrency/v1/sync.py,sha256=qKE0YzNbrmYooTwP7pz4m1BUz61THCUIF45_PE5IyYg,2375
  prefect/deployments/__init__.py,sha256=_wb7NxDKhq11z9MjYsPckmT3o6MRhGLRgCV9TmvYtew,1002
- prefect/deployments/base.py,sha256=rEMb-AXUuO66a7Qwq0KFUI1L0Xrl_-8z7cgAKaysfwg,16136
+ prefect/deployments/base.py,sha256=OyaKZ1Uk16XtvABh5byO6I3jp_1FYG301ryjDq00qJE,16688
  prefect/deployments/deployments.py,sha256=EvC9qBdvJRc8CHJqRjFTqtzx75SE8bpZOl5C-2eULyA,109
  prefect/deployments/flow_runs.py,sha256=tH6lpEkgHhQ5Ipr0bhVAjN6AeOoDwY7UKrkbJihJ6D0,6567
  prefect/deployments/runner.py,sha256=b7jD1DHL7y2jeBXgdBfSsnBMJPHShs4Tt1c5jAeG5Dk,41823
@@ -110,7 +110,7 @@ prefect/docker/__init__.py,sha256=jumlacz2HY9l1ee0L9_kE0PFi9NO3l3pWINm9T5N9hs,52
  prefect/docker/docker_image.py,sha256=Y84_ooCYA9NGl6FElJul9-FaW3teT-eia2SiNtZ1LG8,2999
  prefect/events/__init__.py,sha256=GtKl2bE--pJduTxelH2xy7SadlLJmmis8WR1EYixhuA,2094
  prefect/events/actions.py,sha256=A7jS8bo4zWGnrt3QfSoQs0uYC1xfKXio3IfU0XtTb5s,9129
- prefect/events/clients.py,sha256=ym5LM1M69Ar3yKhMrASyqbSYnGsrln6UQgqO-ITDYyY,22136
+ prefect/events/clients.py,sha256=fCR64VROlbMfVY5WL7Dy_1UroBKYrKNltll2sIiD8Ek,23028
  prefect/events/filters.py,sha256=IJ1TF-TCC7Wk2nJsbYW-HyAANToDQ6z1MdD63qE-lfw,8186
  prefect/events/related.py,sha256=TQPYIPJI_vZlZgZgq3YpsGCmFleiZCMDtn_jMrYBJRg,6537
  prefect/events/utilities.py,sha256=ajIAiNFTN5Bz57IEq-o-i1BJdUi7P2oYH_6GyQjCKs8,2635
@@ -150,7 +150,7 @@ prefect/records/filesystem.py,sha256=X-h7r5deiHH5IaaDk4ugOCmR5ZKnJeU2cLgp0AkMt0E
  prefect/records/memory.py,sha256=YdzQvEfb-CX0sKxAZK5TaNxVvAlyYlZse9qdoer6Xbk,6447
  prefect/records/result_store.py,sha256=3ZUFNHCCv_qBQhmIFdvlK_GMnPZcFacaI9dVdDKWdwA,2431
  prefect/runner/__init__.py,sha256=7U-vAOXFkzMfRz1q8Uv6Otsvc0OrPYLLP44srwkJ_8s,89
- prefect/runner/runner.py,sha256=P1r2X59rlGz7k5QNjKcvajs4-IfaA8fpu6Ag6u2Wpxk,49969
+ prefect/runner/runner.py,sha256=G9OfJRRGLaerUAF7Gt1WUwGsdiFIiLLs8t9CXDCiw48,48672
  prefect/runner/server.py,sha256=2o5vhrL7Zbn-HBStWhCjqqViex5Ye9GiQ1EW9RSEzdo,10500
  prefect/runner/storage.py,sha256=OsBa4nWdFxOTiAMNLFpexBdi5K3iuxidQx4YWZwditE,24734
  prefect/runner/submit.py,sha256=RuyDr-ved9wjYYarXiehY5oJVFf_HE3XKKACNWpxpPc,8131
@@ -192,14 +192,14 @@ prefect/utilities/schema_tools/__init__.py,sha256=KsFsTEHQqgp89TkDpjggkgBBywoHQP
  prefect/utilities/schema_tools/hydration.py,sha256=k12qVCdLLrK-mNo1hPCdhxM5f_N14Nj0vJdtiWYWffk,8858
  prefect/utilities/schema_tools/validation.py,sha256=2GCjxwApTFwzey40ul9OkcAXrU3r-kWK__9ucMo0qbk,9744
  prefect/workers/__init__.py,sha256=8dP8SLZbWYyC_l9DRTQSE3dEbDgns5DZDhxkp_NfsbQ,35
- prefect/workers/base.py,sha256=p3rZBZ5rmiAkpuR7GYK6O6Qn4emt-pqAKDeMCgEv9Ag,45880
+ prefect/workers/base.py,sha256=ALmFjBgTh0S490x6n2Xq674dk5Xm-_AsTdNYGISfhv0,44021
  prefect/workers/block.py,sha256=BOVVY5z-vUIQ2u8LwMTXDaNys2fjOZSS5YGDwJmTQjI,230
  prefect/workers/cloud.py,sha256=BOVVY5z-vUIQ2u8LwMTXDaNys2fjOZSS5YGDwJmTQjI,230
  prefect/workers/process.py,sha256=tcJ3fbiraLCfpVGpv8dOHwMSfVzeD_kyguUOvPuIz6I,19796
  prefect/workers/server.py,sha256=lgh2FfSuaNU7b6HPxSFm8JtKvAvHsZGkiOo4y4tW1Cw,2022
  prefect/workers/utilities.py,sha256=VfPfAlGtTuDj0-Kb8WlMgAuOfgXCdrGAnKMapPSBrwc,2483
- prefect_client-3.0.3.dist-info/LICENSE,sha256=MCxsn8osAkzfxKC4CC_dLcUkU8DZLkyihZ8mGs3Ah3Q,11357
- prefect_client-3.0.3.dist-info/METADATA,sha256=WJVB5YHCoG2EQzn1y8HlCSgGcy2gtWZtNHChFI7CBE4,7332
- prefect_client-3.0.3.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
- prefect_client-3.0.3.dist-info/top_level.txt,sha256=MJZYJgFdbRc2woQCeB4vM6T33tr01TmkEhRcns6H_H4,8
- prefect_client-3.0.3.dist-info/RECORD,,
+ prefect_client-3.0.4.dist-info/LICENSE,sha256=MCxsn8osAkzfxKC4CC_dLcUkU8DZLkyihZ8mGs3Ah3Q,11357
+ prefect_client-3.0.4.dist-info/METADATA,sha256=ylM5VSYd7hcLhlSEVyvOk4Pmp5LuAfj7Fae0wdGUB0o,7332
+ prefect_client-3.0.4.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+ prefect_client-3.0.4.dist-info/top_level.txt,sha256=MJZYJgFdbRc2woQCeB4vM6T33tr01TmkEhRcns6H_H4,8
+ prefect_client-3.0.4.dist-info/RECORD,,