prefect-client 3.0.0rc10__py3-none-any.whl → 3.0.0rc12__py3-none-any.whl

This diff reflects the changes between publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.
Files changed (39)
  1. prefect/_internal/concurrency/api.py +1 -1
  2. prefect/_internal/concurrency/services.py +9 -0
  3. prefect/_internal/retries.py +61 -0
  4. prefect/artifacts.py +12 -0
  5. prefect/client/cloud.py +1 -1
  6. prefect/client/schemas/actions.py +4 -0
  7. prefect/client/schemas/objects.py +1 -1
  8. prefect/concurrency/asyncio.py +3 -3
  9. prefect/concurrency/events.py +1 -1
  10. prefect/concurrency/services.py +3 -2
  11. prefect/concurrency/sync.py +19 -5
  12. prefect/context.py +8 -2
  13. prefect/deployments/__init__.py +28 -15
  14. prefect/deployments/steps/pull.py +7 -0
  15. prefect/events/schemas/events.py +10 -0
  16. prefect/flow_engine.py +10 -9
  17. prefect/flows.py +194 -68
  18. prefect/futures.py +53 -7
  19. prefect/logging/loggers.py +1 -1
  20. prefect/results.py +1 -46
  21. prefect/runner/runner.py +96 -23
  22. prefect/runner/server.py +20 -22
  23. prefect/runner/submit.py +0 -8
  24. prefect/runtime/flow_run.py +38 -3
  25. prefect/settings.py +9 -30
  26. prefect/task_engine.py +158 -48
  27. prefect/task_worker.py +1 -1
  28. prefect/tasks.py +164 -17
  29. prefect/transactions.py +2 -15
  30. prefect/utilities/asyncutils.py +13 -9
  31. prefect/utilities/engine.py +34 -1
  32. prefect/workers/base.py +98 -208
  33. prefect/workers/process.py +262 -4
  34. prefect/workers/server.py +27 -9
  35. {prefect_client-3.0.0rc10.dist-info → prefect_client-3.0.0rc12.dist-info}/METADATA +4 -4
  36. {prefect_client-3.0.0rc10.dist-info → prefect_client-3.0.0rc12.dist-info}/RECORD +39 -38
  37. {prefect_client-3.0.0rc10.dist-info → prefect_client-3.0.0rc12.dist-info}/LICENSE +0 -0
  38. {prefect_client-3.0.0rc10.dist-info → prefect_client-3.0.0rc12.dist-info}/WHEEL +0 -0
  39. {prefect_client-3.0.0rc10.dist-info → prefect_client-3.0.0rc12.dist-info}/top_level.txt +0 -0
prefect/task_engine.py CHANGED
@@ -5,6 +5,7 @@ import time
  from asyncio import CancelledError
  from contextlib import ExitStack, contextmanager
  from dataclasses import dataclass, field
+ from functools import wraps
  from textwrap import dedent
  from typing import (
  Any,
@@ -53,6 +54,7 @@ from prefect.records.result_store import ResultFactoryStore
  from prefect.results import BaseResult, ResultFactory, _format_user_supplied_storage_key
  from prefect.settings import (
  PREFECT_DEBUG_MODE,
+ PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION,
  PREFECT_TASKS_REFRESH_CACHE,
  )
  from prefect.states import (
@@ -124,8 +126,7 @@ class TaskRunEngine(Generic[P, R]):
  raise ValueError("Task run is not set")
  return self.task_run.state

- @property
- def can_retry(self) -> bool:
+ def can_retry(self, exc: Exception) -> bool:
  retry_condition: Optional[
  Callable[[Task[P, Coroutine[Any, Any, R]], TaskRun, State], bool]
  ] = self.task.retry_condition_fn
@@ -136,9 +137,19 @@ class TaskRunEngine(Generic[P, R]):
  f"Running `retry_condition_fn` check {retry_condition!r} for task"
  f" {self.task.name!r}"
  )
- return not retry_condition or retry_condition(
- self.task, self.task_run, self.state
+ state = Failed(
+ data=exc,
+ message=f"Task run encountered unexpected exception: {repr(exc)}",
  )
+ if inspect.iscoroutinefunction(retry_condition):
+ should_retry = run_coro_as_sync(
+ retry_condition(self.task, self.task_run, state)
+ )
+ elif inspect.isfunction(retry_condition):
+ should_retry = retry_condition(self.task, self.task_run, state)
+ else:
+ should_retry = not retry_condition
+ return should_retry
  except Exception:
  self.logger.error(
  (
@@ -269,6 +280,17 @@ class TaskRunEngine(Generic[P, R]):
  return

  new_state = Running()
+
+ if PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION:
+ self.task_run.start_time = new_state.timestamp
+ self.task_run.run_count += 1
+
+ flow_run_context = FlowRunContext.get()
+ if flow_run_context:
+ # Carry forward any task run information from the flow run
+ flow_run = flow_run_context.flow_run
+ self.task_run.flow_run_run_count = flow_run.run_count
+
  state = self.set_state(new_state)

  # TODO: this is temporary until the API stops rejecting state transitions
@@ -298,24 +320,37 @@ class TaskRunEngine(Generic[P, R]):
  last_state = self.state
  if not self.task_run:
  raise ValueError("Task run is not set")
- try:
- new_state = propose_state_sync(
- self.client, state, task_run_id=self.task_run.id, force=force
- )
- except Pause as exc:
- # We shouldn't get a pause signal without a state, but if this happens,
- # just use a Paused state to assume an in-process pause.
- new_state = exc.state if exc.state else Paused()
- if new_state.state_details.pause_reschedule:
- # If we're being asked to pause and reschedule, we should exit the
- # task and expect to be resumed later.
- raise

- # currently this is a hack to keep a reference to the state object
- # that has an in-memory result attached to it; using the API state
+ if PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION:
+ self.task_run.state = new_state = state
+
+ # Ensure that the state_details are populated with the current run IDs
+ new_state.state_details.task_run_id = self.task_run.id
+ new_state.state_details.flow_run_id = self.task_run.flow_run_id
+
+ # Predictively update the de-normalized task_run.state_* attributes
+ self.task_run.state_id = new_state.id
+ self.task_run.state_type = new_state.type
+ self.task_run.state_name = new_state.name
+ else:
+ try:
+ new_state = propose_state_sync(
+ self.client, state, task_run_id=self.task_run.id, force=force
+ )
+ except Pause as exc:
+ # We shouldn't get a pause signal without a state, but if this happens,
+ # just use a Paused state to assume an in-process pause.
+ new_state = exc.state if exc.state else Paused()
+ if new_state.state_details.pause_reschedule:
+ # If we're being asked to pause and reschedule, we should exit the
+ # task and expect to be resumed later.
+ raise
+
+ # currently this is a hack to keep a reference to the state object
+ # that has an in-memory result attached to it; using the API state
+ # could result in losing that reference
+ self.task_run.state = new_state

- # could result in losing that reference
- self.task_run.state = new_state
  # emit a state change event
  self._last_event = emit_task_run_state_change_event(
  task_run=self.task_run,
@@ -323,6 +358,7 @@ class TaskRunEngine(Generic[P, R]):
  validated_state=self.task_run.state,
  follows=self._last_event,
  )
+
  return new_state

  def result(self, raise_on_failure: bool = True) -> "Union[R, State, None]":
@@ -367,11 +403,19 @@ class TaskRunEngine(Generic[P, R]):
  )
  transaction.stage(
  terminal_state.data,
- on_rollback_hooks=self.task.on_rollback_hooks,
- on_commit_hooks=self.task.on_commit_hooks,
+ on_rollback_hooks=[
+ _with_transaction_hook_logging(hook, "rollback", self.logger)
+ for hook in self.task.on_rollback_hooks
+ ],
+ on_commit_hooks=[
+ _with_transaction_hook_logging(hook, "commit", self.logger)
+ for hook in self.task.on_commit_hooks
+ ],
  )
  if transaction.is_committed():
  terminal_state.name = "Cached"
+
+ self.record_terminal_state_timing(terminal_state)
  self.set_state(terminal_state)
  self._return_value = result
  return result
@@ -383,7 +427,7 @@ class TaskRunEngine(Generic[P, R]):
  - If the task has a retry delay, place in AwaitingRetry state with a delayed scheduled time.
  - If the task has no retries left, or the retry condition is not met, return False.
  """
- if self.retries < self.task.retries and self.can_retry:
+ if self.retries < self.task.retries and self.can_retry(exc):
  if self.task.retry_delay_seconds:
  delay = (
  self.task.retry_delay_seconds[
@@ -398,6 +442,8 @@ class TaskRunEngine(Generic[P, R]):
  else:
  delay = None
  new_state = Retrying()
+ if PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION:
+ self.task_run.run_count += 1

  self.logger.info(
  "Task run failed with exception: %r - " "Retry %s/%s will start %s",
@@ -432,6 +478,7 @@ class TaskRunEngine(Generic[P, R]):
  result_factory=getattr(context, "result_factory", None),
  )
  )
+ self.record_terminal_state_timing(state)
  self.set_state(state)
  self._raised = exc

@@ -454,9 +501,20 @@ class TaskRunEngine(Generic[P, R]):
  state = run_coro_as_sync(exception_to_crashed_state(exc))
  self.logger.error(f"Crash detected! {state.message}")
  self.logger.debug("Crash details:", exc_info=exc)
+ self.record_terminal_state_timing(state)
  self.set_state(state, force=True)
  self._raised = exc

+ def record_terminal_state_timing(self, state: State) -> None:
+ if PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION:
+ if self.task_run.start_time and not self.task_run.end_time:
+ self.task_run.end_time = state.timestamp
+
+ if self.task_run.state.is_running():
+ self.task_run.total_run_time += (
+ state.timestamp - self.task_run.state.timestamp
+ )
+
  @contextmanager
  def setup_run_context(self, client: Optional[SyncPrefectClient] = None):
  from prefect.utilities.engine import (
@@ -469,7 +527,8 @@ class TaskRunEngine(Generic[P, R]):
  if not self.task_run:
  raise ValueError("Task run is not set")

- self.task_run = client.read_task_run(self.task_run.id)
+ if not PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION:
+ self.task_run = client.read_task_run(self.task_run.id)
  with ExitStack() as stack:
  if log_prints := should_log_prints(self.task):
  stack.enter_context(patch_print())
@@ -483,23 +542,24 @@ class TaskRunEngine(Generic[P, R]):
  client=client,
  )
  )
- # set the logger to the task run logger
+
  self.logger = task_run_logger(task_run=self.task_run, task=self.task) # type: ignore

- # update the task run name if necessary
- if not self._task_name_set and self.task.task_run_name:
- task_run_name = _resolve_custom_task_run_name(
- task=self.task, parameters=self.parameters
- )
- self.client.set_task_run_name(
- task_run_id=self.task_run.id, name=task_run_name
- )
- self.logger.extra["task_run_name"] = task_run_name
- self.logger.debug(
- f"Renamed task run {self.task_run.name!r} to {task_run_name!r}"
- )
- self.task_run.name = task_run_name
- self._task_name_set = True
+ if not PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION:
+ # update the task run name if necessary
+ if not self._task_name_set and self.task.task_run_name:
+ task_run_name = _resolve_custom_task_run_name(
+ task=self.task, parameters=self.parameters
+ )
+ self.client.set_task_run_name(
+ task_run_id=self.task_run.id, name=task_run_name
+ )
+ self.logger.extra["task_run_name"] = task_run_name
+ self.logger.debug(
+ f"Renamed task run {self.task_run.name!r} to {task_run_name!r}"
+ )
+ self.task_run.name = task_run_name
+ self._task_name_set = True
  yield

  @contextmanager
@@ -511,22 +571,47 @@ class TaskRunEngine(Generic[P, R]):
  """
  Enters a client context and creates a task run if needed.
  """
+
  with hydrated_context(self.context):
  with ClientContext.get_or_create() as client_ctx:
  self._client = client_ctx.sync_client
  self._is_started = True
  try:
  if not self.task_run:
- self.task_run = run_coro_as_sync(
- self.task.create_run(
- id=task_run_id,
- parameters=self.parameters,
- flow_run_context=FlowRunContext.get(),
- parent_task_run_context=TaskRunContext.get(),
- wait_for=self.wait_for,
- extra_task_inputs=dependencies,
+ if PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION:
+ # TODO - this maybe should be a method on Task?
+ from prefect.utilities.engine import (
+ _resolve_custom_task_run_name,
+ )
+
+ task_run_name = None
+ if not self._task_name_set and self.task.task_run_name:
+ task_run_name = _resolve_custom_task_run_name(
+ task=self.task, parameters=self.parameters
+ )
+
+ self.task_run = run_coro_as_sync(
+ self.task.create_local_run(
+ id=task_run_id,
+ parameters=self.parameters,
+ flow_run_context=FlowRunContext.get(),
+ parent_task_run_context=TaskRunContext.get(),
+ wait_for=self.wait_for,
+ extra_task_inputs=dependencies,
+ task_run_name=task_run_name,
+ )
+ )
+ else:
+ self.task_run = run_coro_as_sync(
+ self.task.create_run(
+ id=task_run_id,
+ parameters=self.parameters,
+ flow_run_context=FlowRunContext.get(),
+ parent_task_run_context=TaskRunContext.get(),
+ wait_for=self.wait_for,
+ extra_task_inputs=dependencies,
+ )
  )
- )
  # Emit an event to capture that the task run was in the `PENDING` state.
  self._last_event = emit_task_run_state_change_event(
  task_run=self.task_run,
@@ -916,3 +1001,28 @@ def run_task(
  return run_task_async(**kwargs)
  else:
  return run_task_sync(**kwargs)
+
+
+ def _with_transaction_hook_logging(
+ hook: Callable[[Transaction], None],
+ hook_type: Literal["rollback", "commit"],
+ logger: logging.Logger,
+ ) -> Callable[[Transaction], None]:
+ @wraps(hook)
+ def _hook(txn: Transaction) -> None:
+ hook_name = _get_hook_name(hook)
+ logger.info(f"Running {hook_type} hook {hook_name!r}")
+
+ try:
+ hook(txn)
+ except Exception as exc:
+ logger.error(
+ f"An error was encountered while running {hook_type} hook {hook_name!r}",
+ )
+ raise exc
+ else:
+ logger.info(
+ f"{hook_type.capitalize()} hook {hook_name!r} finished running successfully"
+ )
+
+ return _hook
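Note on the `can_retry` change above: `retry_condition_fn` now receives a `Failed` state built from the raised exception, and coroutine conditions are awaited via `run_coro_as_sync`, presumably so a single code path serves both sync and async conditions. A minimal sketch of how a task author might use this; the condition body and the assumption that the exception is reachable on `state.data` are illustrative, not taken from the diff:

from prefect import task


async def retry_on_timeout(task, task_run, state) -> bool:
    # The engine constructs the Failed state with `data=exc`, so the original
    # exception is assumed to be available here for inspection.
    return isinstance(state.data, TimeoutError)


@task(retries=3, retry_condition_fn=retry_on_timeout)
def call_flaky_service():
    ...

The other paths added in this file are gated behind the experimental PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION setting, under which state transitions, run counts, and terminal-state timing are recorded locally instead of being proposed to the API.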
prefect/task_worker.py CHANGED
@@ -325,7 +325,7 @@ class TaskWorker:

  if task_run_url := url_for(task_run):
  logger.info(
- f"Submitting task run {task_run.name!r} to engine. View run in the UI at {task_run_url!r}"
+ f"Submitting task run {task_run.name!r} to engine. View in the UI: {task_run_url}"
  )

  if task.isasync:
prefect/tasks.py CHANGED
@@ -6,7 +6,6 @@ Module containing the base workflow task class and decorator - for most use case

  import datetime
  import inspect
- import os
  from copy import copy
  from functools import partial, update_wrapper
  from typing import (
@@ -33,13 +32,19 @@ from uuid import UUID, uuid4

  from typing_extensions import Literal, ParamSpec

+ import prefect.states
  from prefect._internal.compatibility.deprecated import (
  deprecated_async_method,
  )
  from prefect.cache_policies import DEFAULT, NONE, CachePolicy
  from prefect.client.orchestration import get_client
  from prefect.client.schemas import TaskRun
- from prefect.client.schemas.objects import TaskRunInput, TaskRunResult
+ from prefect.client.schemas.objects import (
+ StateDetails,
+ TaskRunInput,
+ TaskRunPolicy,
+ TaskRunResult,
+ )
  from prefect.context import (
  FlowRunContext,
  TagsContext,
@@ -50,6 +55,7 @@ from prefect.futures import PrefectDistributedFuture, PrefectFuture, PrefectFutu
  from prefect.logging.loggers import get_logger
  from prefect.results import ResultFactory, ResultSerializer, ResultStorage
  from prefect.settings import (
+ PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION,
  PREFECT_TASK_DEFAULT_RETRIES,
  PREFECT_TASK_DEFAULT_RETRY_DELAY_SECONDS,
  )
@@ -181,6 +187,31 @@ def _infer_parent_task_runs(
  return parents


+ def _generate_task_key(fn: Callable[..., Any]) -> str:
+ """Generate a task key based on the function name and source code.
+
+ We may eventually want some sort of top-level namespace here to
+ disambiguate tasks with the same function name in different modules,
+ in a more human-readable way, while avoiding relative import problems (see #12337).
+
+ As long as the task implementations are unique (even if named the same), we should
+ not have any collisions.
+
+ Args:
+ fn: The function to generate a task key for.
+ """
+ if not hasattr(fn, "__qualname__"):
+ return to_qualified_name(type(fn))
+
+ qualname = fn.__qualname__.split(".")[-1]
+
+ code_hash = (
+ h[:NUM_CHARS_DYNAMIC_KEY] if (h := hash_objects(fn.__code__)) else "unknown"
+ )
+
+ return f"{qualname}-{code_hash}"
+
+
  class Task(Generic[P, R]):
  """
  A Prefect task definition.
@@ -263,7 +294,7 @@ class Task(Generic[P, R]):
  description: Optional[str] = None,
  tags: Optional[Iterable[str]] = None,
  version: Optional[str] = None,
- cache_policy: Optional[CachePolicy] = NotSet,
+ cache_policy: Union[CachePolicy, Type[NotSet]] = NotSet,
  cache_key_fn: Optional[
  Callable[["TaskRunContext", Dict[str, Any]], Optional[str]]
  ] = None,
@@ -362,17 +393,7 @@ class Task(Generic[P, R]):

  self.tags = set(tags if tags else [])

- if not hasattr(self.fn, "__qualname__"):
- self.task_key = to_qualified_name(type(self.fn))
- else:
- try:
- task_origin_hash = hash_objects(
- self.name, os.path.abspath(inspect.getsourcefile(self.fn))
- )
- except TypeError:
- task_origin_hash = "unknown-source-file"
-
- self.task_key = f"{self.fn.__qualname__}-{task_origin_hash}"
+ self.task_key = _generate_task_key(self.fn)

  if cache_policy is not NotSet and cache_key_fn is not None:
  logger.warning(
@@ -786,6 +807,130 @@ class Task(Generic[P, R]):

  return task_run

+ async def create_local_run(
+ self,
+ client: Optional["PrefectClient"] = None,
+ id: Optional[UUID] = None,
+ parameters: Optional[Dict[str, Any]] = None,
+ flow_run_context: Optional[FlowRunContext] = None,
+ parent_task_run_context: Optional[TaskRunContext] = None,
+ wait_for: Optional[Iterable[PrefectFuture]] = None,
+ extra_task_inputs: Optional[Dict[str, Set[TaskRunInput]]] = None,
+ deferred: bool = False,
+ task_run_name: Optional[str] = None,
+ ) -> TaskRun:
+ if not PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION:
+ raise RuntimeError(
+ "Cannot call `Task.create_local_run` unless "
+ "PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION is True"
+ )
+
+ from prefect.utilities.engine import (
+ _dynamic_key_for_task_run,
+ collect_task_run_inputs_sync,
+ )
+
+ if flow_run_context is None:
+ flow_run_context = FlowRunContext.get()
+ if parent_task_run_context is None:
+ parent_task_run_context = TaskRunContext.get()
+ if parameters is None:
+ parameters = {}
+ if client is None:
+ client = get_client()
+
+ async with client:
+ if not flow_run_context:
+ dynamic_key = f"{self.task_key}-{str(uuid4().hex)}"
+ task_run_name = task_run_name or self.name
+ else:
+ dynamic_key = _dynamic_key_for_task_run(
+ context=flow_run_context, task=self
+ )
+ task_run_name = task_run_name or f"{self.name}-{dynamic_key}"
+
+ if deferred:
+ state = Scheduled()
+ state.state_details.deferred = True
+ else:
+ state = Pending()
+
+ # store parameters for background tasks so that task worker
+ # can retrieve them at runtime
+ if deferred and (parameters or wait_for):
+ parameters_id = uuid4()
+ state.state_details.task_parameters_id = parameters_id
+
+ # TODO: Improve use of result storage for parameter storage / reference
+ self.persist_result = True
+
+ factory = await ResultFactory.from_autonomous_task(self, client=client)
+ context = serialize_context()
+ data: Dict[str, Any] = {"context": context}
+ if parameters:
+ data["parameters"] = parameters
+ if wait_for:
+ data["wait_for"] = wait_for
+ await factory.store_parameters(parameters_id, data)
+
+ # collect task inputs
+ task_inputs = {
+ k: collect_task_run_inputs_sync(v) for k, v in parameters.items()
+ }
+
+ # collect all parent dependencies
+ if task_parents := _infer_parent_task_runs(
+ flow_run_context=flow_run_context,
+ task_run_context=parent_task_run_context,
+ parameters=parameters,
+ ):
+ task_inputs["__parents__"] = task_parents
+
+ # check wait for dependencies
+ if wait_for:
+ task_inputs["wait_for"] = collect_task_run_inputs_sync(wait_for)
+
+ # Join extra task inputs
+ for k, extras in (extra_task_inputs or {}).items():
+ task_inputs[k] = task_inputs[k].union(extras)
+
+ flow_run_id = (
+ getattr(flow_run_context.flow_run, "id", None)
+ if flow_run_context and flow_run_context.flow_run
+ else None
+ )
+ task_run_id = id or uuid4()
+ state = prefect.states.Pending(
+ state_details=StateDetails(
+ task_run_id=task_run_id,
+ flow_run_id=flow_run_id,
+ )
+ )
+ task_run = TaskRun(
+ id=task_run_id,
+ name=task_run_name,
+ flow_run_id=flow_run_id,
+ task_key=self.task_key,
+ dynamic_key=str(dynamic_key),
+ task_version=self.version,
+ empirical_policy=TaskRunPolicy(
+ retries=self.retries,
+ retry_delay=self.retry_delay_seconds,
+ retry_jitter_factor=self.retry_jitter_factor,
+ ),
+ tags=list(set(self.tags).union(TagsContext.get().current_tags or [])),
+ task_inputs=task_inputs or {},
+ expected_start_time=state.timestamp,
+ state_id=state.id,
+ state_type=state.type,
+ state_name=state.name,
+ state=state,
+ created=state.timestamp,
+ updated=state.timestamp,
+ )
+
+ return task_run
+
  @overload
  def __call__(
  self: "Task[P, NoReturn]",
@@ -1365,7 +1510,7 @@ class Task(Generic[P, R]):

  Args:
  task_runner: The task runner to use for serving the task. If not provided,
- the default ConcurrentTaskRunner will be used.
+ the default task runner will be used.

  Examples:
  Serve a task using the default task runner
@@ -1392,7 +1537,7 @@ def task(
  description: Optional[str] = None,
  tags: Optional[Iterable[str]] = None,
  version: Optional[str] = None,
- cache_policy: CachePolicy = NotSet,
+ cache_policy: Union[CachePolicy, Type[NotSet]] = NotSet,
  cache_key_fn: Optional[
  Callable[["TaskRunContext", Dict[str, Any]], Optional[str]]
  ] = None,
@@ -1430,7 +1575,9 @@ def task(
  tags: Optional[Iterable[str]] = None,
  version: Optional[str] = None,
  cache_policy: Union[CachePolicy, Type[NotSet]] = NotSet,
- cache_key_fn: Callable[["TaskRunContext", Dict[str, Any]], Optional[str]] = None,
+ cache_key_fn: Union[
+ Callable[["TaskRunContext", Dict[str, Any]], Optional[str]], None
+ ] = None,
  cache_expiration: Optional[datetime.timedelta] = None,
  task_run_name: Optional[Union[Callable[[], str], str]] = None,
  retries: Optional[int] = None,
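Note on the task key change above: keys are now derived from the function's bare qualname plus a truncated hash of its code object, instead of the task name plus the absolute path of the source file. The sketch below is illustrative only, with hashlib standing in for prefect.utilities.hashing.hash_objects and 8 characters assumed for NUM_CHARS_DYNAMIC_KEY:

import hashlib


def sketch_task_key(fn) -> str:
    # Mirrors the shape of _generate_task_key's output: "<name>-<code hash>"
    qualname = fn.__qualname__.split(".")[-1]
    code_hash = hashlib.sha256(fn.__code__.co_code).hexdigest()[:8]
    return f"{qualname}-{code_hash}"


def my_task(x: int) -> int:
    return x + 1


print(sketch_task_key(my_task))  # e.g. "my_task-3e2f1a9b"

Per the docstring in the diff, the practical effect is that identical implementations produce the same key regardless of where the file lives, while same-named tasks with different bodies do not collide.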
prefect/transactions.py CHANGED
@@ -26,7 +26,6 @@ from prefect.results import (
  )
  from prefect.utilities.asyncutils import run_coro_as_sync
  from prefect.utilities.collections import AutoEnum
- from prefect.utilities.engine import _get_hook_name


  class IsolationLevel(AutoEnum):
@@ -180,13 +179,10 @@ class Transaction(ContextModel):
  return False

  try:
- hook_name = None
-
  for child in self.children:
  child.commit()

  for hook in self.on_commit_hooks:
- hook_name = _get_hook_name(hook)
  hook(self)

  if self.store and self.key:
@@ -195,16 +191,8 @@ class Transaction(ContextModel):
  return True
  except Exception:
  if self.logger:
- if hook_name:
- msg = (
- f"An error was encountered while running commit hook {hook_name!r}",
- )
- else:
- msg = (
- f"An error was encountered while committing transaction {self.key!r}",
- )
  self.logger.exception(
- msg,
+ f"An error was encountered while committing transaction {self.key!r}",
  exc_info=True,
  )
  self.rollback()
@@ -234,7 +222,6 @@ class Transaction(ContextModel):

  try:
  for hook in reversed(self.on_rollback_hooks):
- hook_name = _get_hook_name(hook)
  hook(self)

  self.state = TransactionState.ROLLED_BACK
@@ -246,7 +233,7 @@ class Transaction(ContextModel):
  except Exception:
  if self.logger:
  self.logger.exception(
- f"An error was encountered while running rollback hook {hook_name!r}",
+ f"An error was encountered while rolling back transaction {self.key!r}",
  exc_info=True,
  )
  return False
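Taken together with the task_engine.py changes, per-hook logging has moved out of Transaction.commit()/rollback() and into the _with_transaction_hook_logging wrapper applied by the engine, so Transaction now logs only transaction-level failures. A sketch of hooks that would be wrapped this way, assuming the Task.on_commit/Task.on_rollback registration decorators; the file-writing example itself is illustrative:

from prefect import task


@task
def write_file(contents: str) -> None:
    with open("side-effect.txt", "w") as f:
        f.write(contents)


@write_file.on_rollback
def cleanup(transaction) -> None:
    # With this release, the "Running rollback hook ..." and
    # "... finished running successfully" messages come from the task run
    # logger via the engine wrapper, not from Transaction itself.
    import os

    os.remove("side-effect.txt")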