prefect 3.6.7__py3-none-any.whl → 3.6.8__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
prefect/_build_info.py CHANGED
@@ -1,5 +1,5 @@
  # Generated by versioningit
- __version__ = "3.6.7"
- __build_date__ = "2025-12-18 19:54:23.981675+00:00"
- __git_commit__ = "ebfef643e5d55b20a7b0b6a80e9756987ac415cb"
+ __version__ = "3.6.8"
+ __build_date__ = "2025-12-24 19:25:13.006611+00:00"
+ __git_commit__ = "8ae19ff770f3b9095c478314f845b0d699ba852f"
  __dirty__ = False
prefect/blocks/notifications.py CHANGED
@@ -844,11 +844,12 @@ class CustomWebhookNotificationBlock(NotificationBlock):
  ' "{{tokenFromSecrets}}"}'
  ],
  )
- form_data: Optional[dict[str, str]] = Field(
+ form_data: Optional[dict[str, str] | str] = Field(
  default=None,
  title="Form Data",
  description=(
- "Send form data as payload. Should not be used together with _JSON Data_."
+ "Send form data as payload. Should not be used together with _JSON Data_. "
+ "Can be a dictionary for form-encoded data or a string for raw body content."
  ),
  examples=[
  '{"text": "{{subject}}\\n{{body}}", "title": "{{name}}", "token":'
@@ -882,13 +883,18 @@ class CustomWebhookNotificationBlock(NotificationBlock):
  "name": self.name,
  }
  )
+ # httpx uses 'data' for form-encoded dicts, 'content' for raw string/bytes
+ if isinstance(self.form_data, str):
+ data_key = "content"
+ else:
+ data_key = "data"
  # do substution
  return apply_values(
  {
  "method": self.method,
  "url": self.url,
  "params": self.params,
- "data": self.form_data,
+ data_key: self.form_data,
  "json": self.json_data,
  "headers": self.headers,
  "cookies": self.cookies,
prefect/cache_policies.py CHANGED
@@ -1,6 +1,7 @@
  import inspect
  from copy import deepcopy
  from dataclasses import dataclass, field
+ from logging import Logger
  from pathlib import Path
  from typing import (
  TYPE_CHECKING,
@@ -16,6 +17,7 @@ from typing_extensions import Self

  from prefect.context import TaskRunContext
  from prefect.exceptions import HashError
+ from prefect.logging import get_logger
  from prefect.utilities.hashing import hash_objects

  if TYPE_CHECKING:
@@ -25,6 +27,8 @@ if TYPE_CHECKING:

  STABLE_TRANSFORMS: dict[type, Callable[[Any], Any]] = {}

+ logger: Logger = get_logger(__name__)
+

  def _register_stable_transforms() -> None:
  """
@@ -291,6 +295,13 @@ class TaskSource(CachePolicy):
  ) -> Optional[str]:
  if not task_ctx:
  return None
+
+ # Use stored source code if available (works after cloudpickle serialization)
+ lines = getattr(task_ctx.task, "source_code", None)
+ if lines is not None:
+ return hash_objects(lines, raise_on_failure=True)
+
+ # Fall back to inspect.getsource for local execution
  try:
  lines = inspect.getsource(task_ctx.task)
  except TypeError:
@@ -300,6 +311,7 @@ class TaskSource(CachePolicy):
  lines = task_ctx.task.fn.__code__.co_code
  else:
  raise
+
  return hash_objects(lines, raise_on_failure=True)

prefect/cli/flow_run.py CHANGED
@@ -10,9 +10,15 @@ import signal
  import threading
  import webbrowser
  from types import FrameType
- from typing import List, Optional
+ from typing import TYPE_CHECKING, List, Optional
  from uuid import UUID

+ from prefect.utilities.callables import get_call_parameters, parameters_to_args_kwargs
+
+ if TYPE_CHECKING:
+ from prefect.client.orchestration import PrefectClient
+ from prefect.client.schemas.objects import FlowRun
+
  import httpx
  import orjson
  import typer
@@ -32,7 +38,7 @@ from prefect.client.schemas.sorting import FlowRunSort, LogSort
  from prefect.exceptions import ObjectNotFound
  from prefect.logging import get_logger
  from prefect.runner import Runner
- from prefect.states import State
+ from prefect.states import State, exception_to_crashed_state
  from prefect.types._datetime import human_friendly_diff
  from prefect.utilities.asyncutils import run_sync_in_worker_thread
  from prefect.utilities.urls import url_for
@@ -43,6 +49,65 @@ flow_run_app: PrefectTyper = PrefectTyper(
  app.add_typer(flow_run_app, aliases=["flow-runs"])

  LOGS_DEFAULT_PAGE_SIZE = 200
+
+
+ async def _get_flow_run_by_id_or_name(
+ client: "PrefectClient",
+ id_or_name: str,
+ ) -> "FlowRun":
+ """
+ Resolve a flow run identifier that could be either a UUID or a name.
+
+ Flow run names are not guaranteed to be unique, so this function will
+ error if multiple flow runs match the given name.
+
+ Args:
+ client: The Prefect client to use for API calls
+ id_or_name: Either a UUID string or a flow run name
+
+ Returns:
+ The matching FlowRun object
+
+ Raises:
+ typer.Exit: If flow run not found, or if multiple flow runs match the name
+ """
+ from prefect.client.schemas.filters import FlowRunFilterName
+
+ # First, try parsing as UUID
+ try:
+ flow_run_id = UUID(id_or_name)
+ try:
+ return await client.read_flow_run(flow_run_id)
+ except ObjectNotFound:
+ exit_with_error(f"Flow run '{id_or_name}' not found!")
+ except ValueError:
+ # Not a valid UUID, treat as a name
+ pass
+
+ # Query by name (exact match)
+ flow_runs = await client.read_flow_runs(
+ flow_run_filter=FlowRunFilter(name=FlowRunFilterName(any_=[id_or_name])),
+ limit=100, # Reasonable limit for displaying matches
+ )
+
+ if not flow_runs:
+ exit_with_error(f"Flow run '{id_or_name}' not found!")
+
+ if len(flow_runs) == 1:
+ return flow_runs[0]
+
+ # Multiple matches - show all and exit with error
+ lines = [f"Multiple flow runs found with name '{id_or_name}':\n"]
+ for fr in flow_runs:
+ state_name = fr.state.name if fr.state else "unknown"
+ timestamp = fr.start_time or fr.created
+ timestamp_str = timestamp.strftime("%Y-%m-%d %H:%M:%S") if timestamp else "N/A"
+ lines.append(f" - {fr.id} ({state_name}, {timestamp_str})")
+
+ lines.append("\nPlease retry using an explicit flow run ID.")
+ exit_with_error("\n".join(lines))
+
+
  LOGS_WITH_LIMIT_FLAG_DEFAULT_NUM_LOGS = 20

  logger: "logging.Logger" = get_logger(__name__)
@@ -276,6 +341,189 @@ async def cancel(id: UUID):
  exit_with_success(f"Flow run '{id}' was successfully scheduled for cancellation.")


+ @flow_run_app.command()
+ async def retry(
+ id_or_name: str = typer.Argument(
+ ...,
+ help="The flow run ID (UUID) or name to retry.",
+ ),
+ entrypoint: Optional[str] = typer.Option(
+ None,
+ "--entrypoint",
+ "-e",
+ help=(
+ "The path to a file containing the flow to run, and the name of the flow "
+ "function, in the format `path/to/file.py:flow_function_name`. "
+ "Required if the flow run does not have an associated deployment."
+ ),
+ ),
+ ):
+ """
+ Retry a failed or completed flow run.
+
+ The flow run can be specified by either its UUID or its name. If multiple
+ flow runs have the same name, you must use the UUID to disambiguate.
+
+ If the flow run has an associated deployment, it will be scheduled for retry
+ and a worker will pick it up. If there is no deployment, you must provide
+ an --entrypoint to the flow code, and the flow will execute locally.
+
+ \b
+ Examples:
+ $ prefect flow-run retry abc123-def456-7890-...
+ $ prefect flow-run retry my-flow-run-name
+ $ prefect flow-run retry abc123 --entrypoint ./flows/my_flow.py:my_flow
+ """
+ from prefect.flow_engine import run_flow
+ from prefect.flows import load_flow_from_entrypoint
+ from prefect.states import Scheduled
+
+ terminal_states = {
+ StateType.COMPLETED,
+ StateType.FAILED,
+ StateType.CANCELLED,
+ StateType.CRASHED,
+ }
+
+ async with get_client() as client:
+ # Resolve flow run by ID or name
+ flow_run = await _get_flow_run_by_id_or_name(client, id_or_name)
+ flow_run_id = flow_run.id
+
+ # Validate flow run is in terminal state
+ if flow_run.state is None or flow_run.state.type not in terminal_states:
+ current_state = flow_run.state.type.value if flow_run.state else "unknown"
+ exit_with_error(
+ f"Flow run '{flow_run_id}' is in state '{current_state}' and cannot be retried. "
+ f"Only flow runs in terminal states (COMPLETED, FAILED, CANCELLED, CRASHED) can be retried."
+ )
+
+ # Branch based on deployment association
+ if flow_run.deployment_id:
+ # Deployment-based retry: set state to Scheduled and exit
+ # Use force=True to bypass orchestration rules that prevent state transitions
+ # from terminal states (e.g., CANCELLED -> SCHEDULED)
+ scheduled_state = Scheduled(message="Retried via CLI")
+ try:
+ result = await client.set_flow_run_state(
+ flow_run_id=flow_run_id, state=scheduled_state, force=True
+ )
+ except ObjectNotFound:
+ exit_with_error(f"Flow run '{flow_run_id}' not found!")
+
+ if result.status == SetStateStatus.ABORT:
+ exit_with_error(
+ f"Flow run '{flow_run_id}' could not be retried. Reason: '{result.details.reason}'"
+ )
+
+ exit_with_success(
+ f"Flow run '{flow_run_id}' has been scheduled for retry. "
+ "A worker will pick it up shortly."
+ )
+ else:
+ # Local retry: require entrypoint and execute synchronously
+ if not entrypoint:
+ exit_with_error(
+ f"Flow run '{flow_run_id}' does not have an associated deployment. "
+ "Please provide an --entrypoint to the flow code.\n\n"
+ f"Example: prefect flow-run retry {flow_run_id} --entrypoint ./flows/my_flow.py:my_flow"
+ )
+
+ # Load the flow from entrypoint
+ try:
+ flow = load_flow_from_entrypoint(entrypoint, use_placeholder_flow=False)
+ except Exception as exc:
+ exit_with_error(
+ f"Failed to load flow from entrypoint '{entrypoint}': {exc}"
+ )
+
+ # Check if this is an infrastructure-bound flow
+ from prefect.flows import InfrastructureBoundFlow
+
+ if isinstance(flow, InfrastructureBoundFlow):
+ app.console.print(
+ f"Retrying flow run '{flow_run_id}' on remote infrastructure "
+ f"(work pool: {flow.work_pool})..."
+ )
+
+ try:
+ # Use the retry method which handles remote execution
+ await flow.retry(flow_run)
+ except Exception as exc:
+ exit_with_error(f"Flow run failed: {exc}")
+
+ # Re-fetch to get final state
+ flow_run = await client.read_flow_run(flow_run_id)
+ final_state = flow_run.state.type.value if flow_run.state else "unknown"
+
+ if flow_run.state and flow_run.state.is_completed():
+ exit_with_success(
+ f"Flow run '{flow_run_id}' completed successfully."
+ )
+ else:
+ exit_with_error(
+ f"Flow run '{flow_run_id}' finished with state: {final_state}"
+ )
+ else:
+ # Regular local execution path
+ # Set state to Scheduled with force=True to bypass deployment check
+ scheduled_state = Scheduled(message="Retried via CLI (local execution)")
+ try:
+ result = await client.set_flow_run_state(
+ flow_run_id=flow_run_id, state=scheduled_state, force=True
+ )
+ except ObjectNotFound:
+ exit_with_error(f"Flow run '{flow_run_id}' not found!")
+
+ if result.status == SetStateStatus.ABORT:
+ exit_with_error(
+ f"Flow run '{flow_run_id}' could not be retried. Reason: '{result.details.reason}'"
+ )
+
+ app.console.print(f"Executing flow run '{flow_run_id}' locally...")
+
+ # Re-fetch the flow run to get updated state
+ flow_run = await client.read_flow_run(flow_run_id)
+
+ try:
+ call_args, call_kwargs = parameters_to_args_kwargs(
+ flow.fn, flow_run.parameters if flow_run else {}
+ )
+ parameters = get_call_parameters(flow.fn, call_args, call_kwargs)
+ except Exception as exc:
+ state = await exception_to_crashed_state(exc)
+ await client.set_flow_run_state(
+ flow_run_id=flow_run_id, state=state, force=True
+ )
+ exit_with_error(
+ "Failed to use parameters from previous attempt. Please ensure the flow signature has not changed since the last run."
+ )
+
+ # Execute the flow synchronously, reusing the existing flow run
+ try:
+ run_flow(
+ flow=flow,
+ flow_run=flow_run,
+ return_type="state",
+ parameters=parameters,
+ )
+ except Exception as exc:
+ exit_with_error(f"Flow run failed: {exc}")
+
+ # Re-fetch to get final state
+ flow_run = await client.read_flow_run(flow_run_id)
+ final_state = flow_run.state.type.value if flow_run.state else "unknown"
+
+ if flow_run.state and flow_run.state.is_completed():
+ exit_with_success(
+ f"Flow run '{flow_run_id}' completed successfully."
+ )
+ else:
+ exit_with_error(
+ f"Flow run '{flow_run_id}' finished with state: {final_state}"
+ )
+
+
  @flow_run_app.command()
  async def logs(
  id: UUID,
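For deployment-backed runs, the new `retry` command amounts to forcing the terminal run back into a SCHEDULED state so a worker will pick it up again. A rough equivalent using the Prefect client directly (the flow run ID is a placeholder and error handling is omitted):

```python
import asyncio
from uuid import UUID

from prefect import get_client
from prefect.states import Scheduled


async def schedule_retry(flow_run_id: UUID) -> None:
    async with get_client() as client:
        # force=True bypasses the orchestration rules that normally block
        # transitions out of terminal states such as FAILED or CANCELLED.
        result = await client.set_flow_run_state(
            flow_run_id=flow_run_id,
            state=Scheduled(message="Retried via script"),
            force=True,
        )
        print(result.status)


asyncio.run(schedule_retry(UUID("00000000-0000-0000-0000-000000000000")))
```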
prefect/flows.py CHANGED
@@ -2204,6 +2204,61 @@ class InfrastructureBoundFlow(Flow[P, R]):

  return run_coro_as_sync(submit_func())

+ async def retry(
+ self,
+ flow_run: "FlowRun",
+ ) -> R | State[R]:
+ """
+ EXPERIMENTAL: This method is experimental and may be removed or changed in future
+ releases.
+
+ Retry an existing flow run on remote infrastructure.
+
+ This method allows retrying a flow run that was previously executed,
+ reusing the same flow run ID and incrementing the run_count.
+
+ Args:
+ flow_run: The existing flow run to retry
+ return_state: If True, return the final state instead of the result
+
+ Returns:
+ The flow result or final state
+
+ Example:
+ ```python
+ from prefect import flow
+ from prefect_aws.experimental import ecs
+
+ @ecs(work_pool="my-pool")
+ @flow
+ def my_flow():
+ ...
+
+ # Original run
+ my_flow() # Creates flow run abc123
+
+ # Later, retry the same flow run
+ flow_run = client.read_flow_run("abc123")
+ await my_flow.retry(flow_run)
+ ```
+ """
+ try:
+ async with self.worker_cls(work_pool_name=self.work_pool) as worker:
+ future = await worker.submit(
+ flow=self,
+ parameters=flow_run.parameters,
+ job_variables=self.job_variables,
+ flow_run=flow_run,
+ )
+ return await future.aresult()
+ except (ExceptionGroup, BaseExceptionGroup) as exc:
+ # For less verbose tracebacks
+ exceptions = exc.exceptions
+ if len(exceptions) == 1:
+ raise exceptions[0] from None
+ else:
+ raise
+
  def submit_to_work_pool(
  self, *args: P.args, **kwargs: P.kwargs
  ) -> PrefectFlowRunFuture[R]:
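The `except (ExceptionGroup, BaseExceptionGroup)` handler at the end of `retry` only unwraps a single-member exception group, presumably because the submission path runs inside a task group; re-raising the lone inner exception keeps tracebacks short, as the "For less verbose tracebacks" comment notes. A standalone sketch of the same unwrapping pattern, assuming Python 3.11+ where `asyncio.TaskGroup` and exception groups are built in:

```python
import asyncio


async def boom() -> None:
    raise ValueError("flow failed")


async def run() -> None:
    try:
        async with asyncio.TaskGroup() as tg:  # failures surface as an ExceptionGroup
            tg.create_task(boom())
    except BaseExceptionGroup as exc:
        # Same idea as InfrastructureBoundFlow.retry: a group holding exactly one
        # exception is re-raised directly for a less verbose traceback.
        if len(exc.exceptions) == 1:
            raise exc.exceptions[0] from None
        raise


try:
    asyncio.run(run())
except ValueError as err:
    print(f"caught a plain exception: {err}")
```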
prefect/futures.py CHANGED
@@ -373,7 +373,7 @@ class PrefectFlowRunFuture(PrefectFuture[R]):
  async def wait_async(self, timeout: float | None = None) -> None:
  if self._final_state:
  logger.debug(
- "Final state already set for %s. Returning...", self.task_run_id
+ "Final state already set for %s. Returning...", self.flow_run_id
  )
  return

@@ -425,7 +425,7 @@ class PrefectFlowRunFuture(PrefectFuture[R]):
  await self.wait_async(timeout=timeout)
  if not self._final_state:
  raise TimeoutError(
- f"Task run {self.task_run_id} did not complete within {timeout} seconds"
+ f"Flow run {self.flow_run_id} did not complete within {timeout} seconds"
  )

  return await self._final_state.aresult(raise_on_failure=raise_on_failure)
prefect/server/api/background_workers.py CHANGED
@@ -1,9 +1,11 @@
  import asyncio
  from contextlib import asynccontextmanager
+ from logging import Logger
  from typing import Any, AsyncGenerator, Callable

  from docket import Docket, Worker

+ from prefect.logging import get_logger
  from prefect.server.api.flow_runs import delete_flow_run_logs
  from prefect.server.api.task_runs import delete_task_run_logs
  from prefect.server.events.services import triggers as _triggers_module # noqa: F401
@@ -20,6 +22,8 @@ from prefect.server.services.perpetual_services import (
  )
  from prefect.server.services.repossessor import revoke_expired_lease

+ logger: Logger = get_logger(__name__)
+
  # Task functions to register with docket for background processing
  task_functions: list[Callable[..., Any]] = [
  # Simple background tasks (from Alex's PR #19377)
@@ -43,7 +47,7 @@ async def background_worker(
  webserver_only: bool = False,
  ) -> AsyncGenerator[None, None]:
  worker_task: asyncio.Task[None] | None = None
- try:
+ async with Worker(docket) as worker:
  # Register background task functions
  docket.register_collection(
  "prefect.server.api.background_workers:task_functions"
@@ -54,14 +58,14 @@
  docket, ephemeral=ephemeral, webserver_only=webserver_only
  )

- async with Worker(docket) as worker:
+ try:
  worker_task = asyncio.create_task(worker.run_forever())
  yield

- finally:
- if worker_task:
- worker_task.cancel()
- try:
- await worker_task
- except asyncio.CancelledError:
- pass
+ finally:
+ if worker_task:
+ worker_task.cancel()
+ try:
+ await worker_task
+ except asyncio.CancelledError:
+ pass
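The reshuffled `background_worker` above keeps the same cleanup behaviour but enters `Worker(docket)` before registering tasks, so the `try`/`finally` now only guards the long-running task. The lifecycle pattern itself, stripped of docket specifics, looks roughly like this (the `run_forever` coroutine is a stand-in):

```python
import asyncio
from contextlib import asynccontextmanager
from typing import AsyncGenerator


async def run_forever() -> None:
    # Stand-in for Worker.run_forever(): loop until cancelled.
    while True:
        await asyncio.sleep(1)


@asynccontextmanager
async def background_worker() -> AsyncGenerator[None, None]:
    worker_task: asyncio.Task[None] | None = None
    try:
        worker_task = asyncio.create_task(run_forever())
        yield
    finally:
        # On exit, cancel the background task and swallow the resulting CancelledError.
        if worker_task:
            worker_task.cancel()
            try:
                await worker_task
            except asyncio.CancelledError:
                pass


async def main() -> None:
    async with background_worker():
        await asyncio.sleep(0.1)  # the application runs while the worker loops


asyncio.run(main())
```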
prefect/server/events/schemas/events.py CHANGED
@@ -34,6 +34,7 @@ from prefect.settings import (
  PREFECT_EVENTS_MAXIMUM_LABELS_PER_RESOURCE,
  PREFECT_EVENTS_MAXIMUM_RELATED_RESOURCES,
  )
+ from prefect.utilities.urls import url_for

  if TYPE_CHECKING:
  import logging
@@ -212,6 +213,12 @@ class ReceivedEvent(Event):
  description="When the event was received by Prefect Cloud",
  )

+ @property
+ def url(self) -> Optional[str]:
+ """Returns the UI URL for this event, allowing users to link to events
+ in automation templates without parsing date strings."""
+ return url_for(self, url_type="ui")
+
  def as_database_row(self) -> dict[str, Any]:
  row = self.model_dump()
  row["resource_id"] = self.resource.id
prefect/tasks.py CHANGED
@@ -456,6 +456,14 @@ class Task(Generic[P, R]):
  update_wrapper(self, fn)
  self.fn = fn

+ # Capture source code for cache key computation
+ # This is stored on the task so it survives cloudpickle serialization
+ # to remote environments where the source file is not available
+ try:
+ self.source_code: str | None = inspect.getsource(fn)
+ except (TypeError, OSError):
+ self.source_code = None
+
  # the task is considered async if its function is async or an async
  # generator
  self.isasync: bool = inspect.iscoroutinefunction(
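The stored `source_code` is what the updated `TaskSource` cache policy (see the `prefect/cache_policies.py` hunks above) now hashes first, so cache keys remain stable after cloudpickle ships a task to an environment where its source file does not exist. A rough illustration of the capture-then-fallback order, with the real `hash_objects` helper replaced by a plain SHA-256:

```python
import hashlib
import inspect
from typing import Any, Callable, Optional


def capture_source(fn: Callable[..., Any]) -> Optional[str]:
    # Mirrors the Task.__init__ change: record source at definition time,
    # tolerating objects whose source inspect cannot resolve.
    try:
        return inspect.getsource(fn)
    except (TypeError, OSError):
        return None


def source_cache_key(task: Any) -> str:
    # Prefer the stored source (survives pickling to a remote worker),
    # then inspect.getsource, then the compiled bytecode as a last resort.
    lines = getattr(task, "source_code", None)
    if lines is None:
        try:
            lines = inspect.getsource(task.fn)
        except (TypeError, OSError):
            lines = task.fn.__code__.co_code
    data = lines if isinstance(lines, bytes) else lines.encode()
    return hashlib.sha256(data).hexdigest()


class FakeTask:
    def __init__(self, fn: Callable[..., Any]) -> None:
        self.fn = fn
        self.source_code = capture_source(fn)


def add(x: int, y: int) -> int:
    return x + y


print(source_cache_key(FakeTask(add)))
```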
prefect/utilities/schema_tools/validation.py CHANGED
@@ -257,7 +257,7 @@ def preprocess_schema(

  if "definitions" in schema: # Also process definitions for reused models
  definitions = cast(dict[str, Any], schema["definitions"])
- for definition in definitions.values():
+ for definition in list(definitions.values()):
  if "properties" in definition:
  required_fields = definition.get("required", [])
  process_properties(
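Wrapping `definitions.values()` in `list()` guards against the definitions dict being mutated while it is being processed: iterating a live dict view that grows mid-loop raises `RuntimeError: dictionary changed size during iteration`, whereas iterating a snapshot does not. A tiny illustration:

```python
defs = {"A": {"properties": {}}, "B": {"properties": {}}}


def process(definition: dict) -> None:
    # Simulate a processor that registers another definition as it walks the schema.
    defs.setdefault("C", {"properties": {}})


try:
    for d in defs.values():  # live view: mutation during iteration fails
        process(d)
except RuntimeError as err:
    print(err)  # dictionary changed size during iteration

defs.pop("C", None)
for d in list(defs.values()):  # snapshot: safe even if new keys are added
    process(d)
print(sorted(defs))  # ['A', 'B', 'C']
```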
prefect/utilities/urls.py CHANGED
@@ -4,7 +4,7 @@ import socket
  import urllib.parse
  from logging import Logger
  from string import Formatter
- from typing import TYPE_CHECKING, Any, Literal, Optional, Union
+ from typing import TYPE_CHECKING, Any, Literal, Optional, Union, cast
  from urllib.parse import urlparse
  from uuid import UUID

@@ -251,9 +251,13 @@ def url_for(
  )
  assert url_format is not None

- if isinstance(obj, ReceivedEvent):
+ # Use duck-typing to handle both client-side and server-side ReceivedEvent
+ if name == "received-event" and hasattr(obj, "occurred"):
+ # Cast to ReceivedEvent for type checking - we've verified it has the
+ # required attributes via hasattr and name check above
+ event = cast(ReceivedEvent, obj)
  url = url_format.format(
- occurred=obj.occurred.strftime("%Y-%m-%d"), obj_id=obj_id
+ occurred=event.occurred.strftime("%Y-%m-%d"), obj_id=obj_id
  )
  else:
  obj_keys = [
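The replaced `isinstance` check matched only one `ReceivedEvent` class; the new check keys off the resolved route name plus the presence of an `occurred` attribute, then uses `typing.cast` purely to keep the type checker satisfied, with `hasattr` serving as the runtime guard. A generic sketch of that duck-typing-plus-cast idiom (the event classes here are hypothetical stand-ins):

```python
from dataclasses import dataclass
from datetime import datetime, timezone
from typing import Protocol, cast


class HasOccurred(Protocol):
    occurred: datetime


@dataclass
class ClientEvent:  # hypothetical stand-in for the client-side model
    occurred: datetime


@dataclass
class ServerEvent:  # hypothetical stand-in for the server-side model
    occurred: datetime


def event_date(obj: object) -> str:
    if hasattr(obj, "occurred"):
        # hasattr is the runtime check; cast() only informs the type checker.
        event = cast(HasOccurred, obj)
        return event.occurred.strftime("%Y-%m-%d")
    raise TypeError("object has no 'occurred' attribute")


now = datetime.now(timezone.utc)
print(event_date(ClientEvent(now)), event_date(ServerEvent(now)))
```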
prefect/workers/base.py CHANGED
@@ -766,6 +766,7 @@ class BaseWorker(abc.ABC, Generic[C, V, R]):
  flow: "Flow[..., FR]",
  parameters: dict[str, Any] | None = None,
  job_variables: dict[str, Any] | None = None,
+ flow_run: "FlowRun | None" = None,
  ) -> "PrefectFlowRunFuture[FR]":
  """
  EXPERIMENTAL: The interface for this method is subject to change.
@@ -775,9 +776,11 @@ class BaseWorker(abc.ABC, Generic[C, V, R]):
  Args:
  flow: The flow to submit
  parameters: The parameters to pass to the flow
+ job_variables: Job variables for infrastructure configuration
+ flow_run: Optional existing flow run to retry (reuses ID instead of creating new)

  Returns:
- A flow run object
+ A flow run future
  """
  warnings.warn(
  "Ad-hoc flow submission via workers is experimental. The interface "
@@ -793,6 +796,7 @@ class BaseWorker(abc.ABC, Generic[C, V, R]):
  flow=flow,
  parameters=parameters,
  job_variables=job_variables,
+ flow_run=flow_run,
  ),
  )
  return PrefectFlowRunFuture(flow_run_id=flow_run.id)
@@ -803,9 +807,17 @@ class BaseWorker(abc.ABC, Generic[C, V, R]):
  parameters: dict[str, Any] | None = None,
  job_variables: dict[str, Any] | None = None,
  task_status: anyio.abc.TaskStatus["FlowRun"] | None = None,
+ flow_run: "FlowRun | None" = None,
  ):
  """
  Submits a flow for the worker to kick off execution for.
+
+ Args:
+ flow: The flow to submit
+ parameters: The parameters to pass to the flow
+ job_variables: Job variables for infrastructure configuration
+ task_status: Task status for signaling when the flow run is ready
+ flow_run: Optional existing flow run to retry (reuses ID instead of creating new)
  """
  from prefect._experimental.bundles import (
  aupload_bundle_to_storage,
@@ -862,28 +874,40 @@ class BaseWorker(abc.ABC, Generic[C, V, R]):
  job_variables = (job_variables or {}) | {"command": " ".join(execute_command)}
  parameters = parameters or {}

- # Create a parent task run if this is a child flow run to ensure it shows up as a child flow in the UI
- parent_task_run = None
- if flow_run_ctx := FlowRunContext.get():
- parent_task = Task[Any, Any](
- name=flow.name,
- fn=flow.fn,
- version=flow.version,
+ if flow_run is None:
+ # Create new flow run (standard behavior)
+ # Create a parent task run if this is a child flow run to ensure it shows up as a child flow in the UI
+ parent_task_run = None
+ if flow_run_ctx := FlowRunContext.get():
+ parent_task = Task[Any, Any](
+ name=flow.name,
+ fn=flow.fn,
+ version=flow.version,
+ )
+ parent_task_run = await parent_task.create_run(
+ flow_run_context=flow_run_ctx,
+ parameters=parameters,
+ )
+
+ flow_run = await self.client.create_flow_run(
+ flow,
+ parameters=flow.serialize_parameters(parameters),
+ state=Pending(),
+ job_variables=job_variables,
+ work_pool_name=self.work_pool.name,
+ tags=TagsContext.get().current_tags,
+ parent_task_run_id=getattr(parent_task_run, "id", None),
  )
- parent_task_run = await parent_task.create_run(
- flow_run_context=flow_run_ctx,
- parameters=parameters,
+ else:
+ # Reuse existing flow run - set state to Pending for retry
+ await self.client.set_flow_run_state(
+ flow_run_id=flow_run.id,
+ state=Pending(message="Retrying on remote infrastructure"),
+ force=True,
  )
+ # Re-fetch to get updated state
+ flow_run = await self.client.read_flow_run(flow_run.id)

- flow_run = await self.client.create_flow_run(
- flow,
- parameters=flow.serialize_parameters(parameters),
- state=Pending(),
- job_variables=job_variables,
- work_pool_name=self.work_pool.name,
- tags=TagsContext.get().current_tags,
- parent_task_run_id=getattr(parent_task_run, "id", None),
- )
  if task_status is not None:
  # Emit the flow run object to .submit to allow it to return a future as soon as possible
  task_status.started(flow_run)
prefect/workers/process.py CHANGED
@@ -263,18 +263,27 @@ class ProcessWorker(
  parameters: dict[str, Any] | None = None,
  job_variables: dict[str, Any] | None = None,
  task_status: anyio.abc.TaskStatus["FlowRun"] | None = None,
+ flow_run: "FlowRun | None" = None,
  ):
  from prefect._experimental.bundles import (
  create_bundle_for_flow_run,
  )

- flow_run = await self.client.create_flow_run(
- flow,
- parameters=parameters,
- state=Pending(),
- job_variables=job_variables,
- work_pool_name=self.work_pool.name,
- )
+ if flow_run is None:
+ flow_run = await self.client.create_flow_run(
+ flow,
+ parameters=parameters,
+ state=Pending(),
+ job_variables=job_variables,
+ work_pool_name=self.work_pool.name,
+ )
+ else:
+ # Reuse existing flow run - set state to Pending for retry
+ await self.client.set_flow_run_state(
+ flow_run.id,
+ Pending(),
+ force=True,
+ )
  if task_status is not None:
  # Emit the flow run object to .submit to allow it to return a future as soon as possible
  task_status.started(flow_run)
prefect-3.6.7.dist-info/METADATA → prefect-3.6.8.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: prefect
- Version: 3.6.7
+ Version: 3.6.8
  Summary: Workflow orchestration and management.
  Project-URL: Changelog, https://github.com/PrefectHQ/prefect/releases
  Project-URL: Documentation, https://docs.prefect.io
@@ -57,7 +57,7 @@ Requires-Dist: pydantic!=2.11.0,!=2.11.1,!=2.11.2,!=2.11.3,!=2.11.4,<3.0.0,>=2.1
  Requires-Dist: pydantic-core<3.0.0,>=2.12.0
  Requires-Dist: pydantic-extra-types<3.0.0,>=2.8.2
  Requires-Dist: pydantic-settings!=2.9.0,<3.0.0,>2.2.1
- Requires-Dist: pydocket>=0.13.0
+ Requires-Dist: pydocket>=0.16.2
  Requires-Dist: python-dateutil<3.0.0,>=2.8.2
  Requires-Dist: python-slugify<9.0,>=5.0
  Requires-Dist: pytz<2026,>=2021.1
@@ -70,7 +70,7 @@ Requires-Dist: semver>=3.0.4
  Requires-Dist: sniffio<2.0.0,>=1.3.0
  Requires-Dist: sqlalchemy[asyncio]<3.0.0,>=2.0
  Requires-Dist: toml>=0.10.0
- Requires-Dist: typer<0.20.0,>=0.16.0
+ Requires-Dist: typer<0.21.0,>=0.16.0
  Requires-Dist: typing-extensions<5.0.0,>=4.10.0
  Requires-Dist: uvicorn!=0.29.0,>=0.14.0
  Requires-Dist: websockets<16.0,>=15.0.1
prefect-3.6.7.dist-info/RECORD → prefect-3.6.8.dist-info/RECORD CHANGED
@@ -2,7 +2,7 @@ prefect/.prefectignore,sha256=awSprvKT0vI8a64mEOLrMxhxqcO-b0ERQeYpA2rNKVQ,390
  prefect/AGENTS.md,sha256=qmCZAuKIF9jQyp5TrW_T8bsM_97-QaiCoQp71A_b2Lg,1008
  prefect/__init__.py,sha256=-cFYePxZEyhJ4zSCwCu9QhTgvA6yRoUYMlAluL8voyE,5441
  prefect/__main__.py,sha256=WFjw3kaYJY6pOTA7WDOgqjsz8zUEUZHCcj3P5wyVa-g,66
- prefect/_build_info.py,sha256=95B1mmlb6G30YylFu5MG8GuW0dSygM8wu90-AA1FpeQ,180
+ prefect/_build_info.py,sha256=wDAQcSSV0PgD03tSk9mYPD8xeBX61wboIbNbFQJypLw,180
  prefect/_result_records.py,sha256=S6QmsODkehGVSzbMm6ig022PYbI6gNKz671p_8kBYx4,7789
  prefect/_states.py,sha256=_BIcTNbnExppu4sRIbeOHxdt5DeaIUAa9qKE8sNIkQ0,9775
  prefect/_versioning.py,sha256=YqR5cxXrY4P6LM1Pmhd8iMo7v_G2KJpGNdsf4EvDFQ0,14132
@@ -10,15 +10,15 @@ prefect/_waiters.py,sha256=Ia2ITaXdHzevtyWIgJoOg95lrEXQqNEOquHvw3T33UQ,9026
  prefect/agent.py,sha256=dPvG1jDGD5HSH7aM2utwtk6RaJ9qg13XjkA0lAIgQmY,287
  prefect/artifacts.py,sha256=ZdMLJeJGK82hibtRzbsVa-g95dMa0D2UP1LiESoXmf4,23951
  prefect/automations.py,sha256=ZzPxn2tINdlXTQo805V4rIlbXuNWxd7cdb3gTJxZIeY,12567
- prefect/cache_policies.py,sha256=jH1aDW6vItTcsEytuTCrNYyjbq87IQPwdOgF0yxiUts,12749
+ prefect/cache_policies.py,sha256=K0cymkMbEQBuwY1fSehefllN4Y4RblpNzYsh-LtjdXM,13156
  prefect/context.py,sha256=zA5j-7HJIdfOft92NW8dxkvgqnKkC-MhhOHfAmM5DoE,34706
  prefect/engine.py,sha256=uB5JN4l045i5JTlRQNT1x7MwlSiGQ5Bop2Q6jHHOgxY,3699
  prefect/exceptions.py,sha256=E67przt2JKW2hzXQGC2XMIMxM8PNtnDunwhW55VUsM8,12598
  prefect/filesystems.py,sha256=PrDsWxT5mfmJSs3ib3pNaKkjEEBExY2kGrhh5fPWFIM,22393
  prefect/flow_engine.py,sha256=bWGHTJ3WZs7taLAzbDvkr8Gvg_NgB7riHmjPtstj8xA,61774
  prefect/flow_runs.py,sha256=gZBtxwSk2k-bUXUvb_Eipg00F2hEm5_batIse85ngXE,18495
- prefect/flows.py,sha256=9lacxyuc2K16KcwcauOdsggt3f21NZDjJ4g8F9AxVI0,128521
- prefect/futures.py,sha256=HwxR17RRuBHT_r5c7XuhmuYG8NnreFCUEuO_FpRBxPw,25670
+ prefect/flows.py,sha256=mV7B-WJJU7oBa_qv1-V4fvuv-luShPpDU253M0xh2us,130277
+ prefect/futures.py,sha256=F0cLoC_DDqNMCgjFDcvHNt2jjusqndTt9S0dQd1DQ-k,25670
  prefect/main.py,sha256=swco0ugs87OPf0nVFtHSmFUAyyMkNFUKC_6Gncp-FFU,2547
  prefect/plugins.py,sha256=6bPNLs5Cab1VqwFAlfwJi7Wj3ASx0ZLdT5_-s8_Blyo,2510
  prefect/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -30,7 +30,7 @@ prefect/task_engine.py,sha256=OtcVWHcp45TLLraz-gK5a6exlYQhbSxLSSHpdRM9NFY,70691
  prefect/task_runners.py,sha256=UlDBdwIB0YBcTG6rMf3ICnGUOZt9WNscaXt2QonigBM,34524
  prefect/task_runs.py,sha256=2wPYMCJet_9PoT8rbacvwt7B7noGR3ksHEP1xXUiXpU,10093
  prefect/task_worker.py,sha256=RifZ3bOl6ppoYPiOAd4TQp2_GEw9eDQoW483rq1q52Q,20805
- prefect/tasks.py,sha256=Vip7I60wvW-ZF-hAx1-ObtQ3sw1JUhC0dK22S0gChQI,82741
+ prefect/tasks.py,sha256=Twbmri_cw3Uq2FVwXzpcaytHFhZPtBWrYpisxD1TPNs,83099
  prefect/transactions.py,sha256=JzRHA_B6Oy-XwmMqZUeiBLOvaf-aCEAdxPEQsYI2opM,26819
  prefect/variables.py,sha256=L1WrzyGF47GMACI8Jji8nlCcAAl1wtLhJmhf4qV0-CQ,9526
  prefect/_experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -90,7 +90,7 @@ prefect/blocks/__init__.py,sha256=D0hB72qMfgqnBB2EMZRxUxlX9yLfkab5zDChOwJZmkY,22
  prefect/blocks/abstract.py,sha256=mpOAWopSR_RrzdxeurBTXVSKisP8ne-k8LYos-tp7go,17021
  prefect/blocks/core.py,sha256=V7a00ewzL37wPkSOrAtdty0gxpVxtqTt3RYOQUfiW9I,67055
  prefect/blocks/fields.py,sha256=1m507VVmkpOnMF_7N-qboRjtw4_ceIuDneX3jZ3Jm54,63
- prefect/blocks/notifications.py,sha256=3Rbicq2IGBLM9B0nfHOLeEs9GXuVWW1VyOjmli6ieFs,37042
+ prefect/blocks/notifications.py,sha256=5GCwOyg3lnQepSbLFwnLg0hxN2To9MExwytFI7KWjuI,37345
  prefect/blocks/redis.py,sha256=fn6nR_ZoFfJqO_sWc-4WUY3D2No7RSXUntgZtRFNXYo,7682
  prefect/blocks/system.py,sha256=9OE9gs0cH0bH6DV5dvf4oprthWBKA6Zh56aSnn2pIU4,2325
  prefect/blocks/webhook.py,sha256=xylFigbDOsn-YzxahkTzNqYwrIA7wwS6204P0goLY3A,2907
@@ -109,7 +109,7 @@ prefect/cli/dev.py,sha256=1toGWrEvskTjir1agQXsCVWezFuvZjCkjNsFYExBmNE,11996
  prefect/cli/events.py,sha256=E_QDm9eEDKxS64QRdmYsp7qPXQXfgIpNq2h575foDxw,6076
  prefect/cli/experimental.py,sha256=skZ_c-agYqMT-YlDzfqjt6AvoXussbaIVGByYlUw12Q,5267
  prefect/cli/flow.py,sha256=lBHC9y6ZeLgeZifsfd7CukGYqp7XrbaLBE7X1yh6OME,5789
- prefect/cli/flow_run.py,sha256=7Ju-3k8Px_-Xm0yRkHubtVu8gGbJdl-bMrY9INLBGEM,14684
+ prefect/cli/flow_run.py,sha256=8I-9UkJBzp_hLx8QS1e78O7aJOrXp0m5tasao_2HVHI,24685
  prefect/cli/flow_runs_watching.py,sha256=nOeLoTn1a-jzR4A1PTjC6reaFO8Qds3bQhhQ40xlUFA,6805
  prefect/cli/global_concurrency_limit.py,sha256=AiFBv2tqrs4QI5mASDa-TGodXSIpvpuRbwcJek8nmr0,14080
  prefect/cli/profile.py,sha256=v1nc2oaWFAwlZNv3yKjcY3qMhencgHkN4-QF-AEQBsA,14712
@@ -290,7 +290,7 @@ prefect/server/api/__init__.py,sha256=SpRTXHC6ApqR4_Y3wNHM7TvvH9FZ8N3tH-RZIgmubg
  prefect/server/api/admin.py,sha256=nINYSrux7XPAV4MMDQUts3X2dddrc3mJtd3iPl5N-jI,2644
  prefect/server/api/artifacts.py,sha256=B19bxxnADiVTo0XtzvsbAHHtsJwr7S1IunjGl-qsSd4,7314
  prefect/server/api/automations.py,sha256=KlQuhEOOKkxAI6f5aZedCKMELi99MdknulIleRP-JVE,8285
- prefect/server/api/background_workers.py,sha256=yMoNgw2thesg56_Uxjihio9rA9Ngikgi3TLOHmSKwA0,2281
+ prefect/server/api/background_workers.py,sha256=ywc5bWP9x_b-zBgSu1MJPwwbfzcU-jR5iR0p5mgxj9E,2414
  prefect/server/api/block_capabilities.py,sha256=0x1vtC2CtSRVcCWydgNbylmfv_LnJHIoPnt7hRiJkN4,828
  prefect/server/api/block_documents.py,sha256=Hj-J10XMnfJGa2-AbJfZHun-iZrUwzxG8h7cXGfBXjg,5838
  prefect/server/api/block_schemas.py,sha256=H9kxnKPlTvz62SMcX1ssHnQ8KACU2728ebCtSp2iLcs,5508
@@ -577,7 +577,7 @@ prefect/server/events/ordering/db.py,sha256=5yf1Hk1qz4GsKPBG95YsGn2r5UEOG0hhJxld
  prefect/server/events/ordering/memory.py,sha256=il-LWxYFEVpj-HgwvIJL_8MgpO4IVk5g1dOLLg8Fd08,12601
  prefect/server/events/schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/server/events/schemas/automations.py,sha256=0oEbdn4oRuHKMiqVaeWJdhDcDpiKmSqvVJvx_hSOFNU,27249
- prefect/server/events/schemas/events.py,sha256=R5gJZ9dowyuwZcNPyRWzjH0iZGMad1QuykWy_ludLzk,12137
+ prefect/server/events/schemas/events.py,sha256=k10wMIBPEUTMDFQCJy1frT8LEP7O0EvZxvL5blzBkAU,12419
  prefect/server/events/schemas/labelling.py,sha256=McGy7dq6Ry2GY3ejnMQnkuL_h77F5MnHXQkyCdePlLU,3103
  prefect/server/events/services/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/server/events/services/actions.py,sha256=o8m89I740YCZfO7DQgyngvHgIXOzpT86pjOvvvDeRmA,1850
@@ -883,20 +883,20 @@ prefect/utilities/slugify.py,sha256=57Vb14t13F3zm1P65KAu8nVeAz0iJCd1Qc5eMG-R5y8,
  prefect/utilities/templating.py,sha256=AnJ-w7wqVl2nfqKHQawEGhloxCU_zihrCtfx5pWFcYU,16017
  prefect/utilities/text.py,sha256=cuXb5EwRP5qFV7w-3_axEft4rDIJAMS8jgCg0kqNGKQ,758
  prefect/utilities/timeout.py,sha256=y7ILFZDpQMnmKB6XloulGDRUYRwnJR_b-XecQLNS7Ts,1265
- prefect/utilities/urls.py,sha256=AtwAt_uBjixUsOqDPTkS23eIbLdvDsWct32ewgcq5uY,9184
+ prefect/utilities/urls.py,sha256=fUcUKnVQkzpfcARBdUIbfZGnvd6hy2tS31Jqhiy2u44,9476
  prefect/utilities/visualization.py,sha256=5rywcA_q2D0KHb5ZoZnrZ_A-cXaAFcQ9-wcZaCn99fM,7421
  prefect/utilities/schema_tools/__init__.py,sha256=At3rMHd2g_Em2P3_dFQlFgqR_EpBwrYtU2N2OJd0pDE,345
  prefect/utilities/schema_tools/hydration.py,sha256=NkRhWkNfxxFmVGhNDfmxdK_xeKaEhs3a42q83Sg9cT4,9436
- prefect/utilities/schema_tools/validation.py,sha256=Wix26IVR-ZJ32-6MX2pHhrwm3reB-Q4iB6_phn85OKE,10743
+ prefect/utilities/schema_tools/validation.py,sha256=UhRLWStdT9__u4yz-mnc3lcdt-VJmRGJBGV_f_9um-Y,10749
  prefect/workers/__init__.py,sha256=EaM1F0RZ-XIJaGeTKLsXDnfOPHzVWk5bk0_c4BVS44M,64
- prefect/workers/base.py,sha256=n0tcQWGg_gQX1zJ-0WHnSCQMzYyg69Lx8fuwSduoGik,64663
+ prefect/workers/base.py,sha256=uZFMqtIs1I4WtTXIUjeD9Sd-KQ8hy30a-Gl3-EWOuyc,65883
  prefect/workers/block.py,sha256=dPvG1jDGD5HSH7aM2utwtk6RaJ9qg13XjkA0lAIgQmY,287
  prefect/workers/cloud.py,sha256=dPvG1jDGD5HSH7aM2utwtk6RaJ9qg13XjkA0lAIgQmY,287
- prefect/workers/process.py,sha256=jvxBQyR8-G4Svt0as-BNg94YpvRMYZBIXKtNwC0igNY,11902
+ prefect/workers/process.py,sha256=O_HZ39lBjedQat4gmIh2SiC2N_Q5q7grBGD13HXB2eo,12235
  prefect/workers/server.py,sha256=bWnYfMfJf5_IO3y3aJOpia7p9lFKC3ZZjiMvHox-UKY,1992
  prefect/workers/utilities.py,sha256=VfPfAlGtTuDj0-Kb8WlMgAuOfgXCdrGAnKMapPSBrwc,2483
- prefect-3.6.7.dist-info/METADATA,sha256=gC6jkDLoTHWSorFX05xqxpOdh0aphEjs3z__A511F2E,13654
- prefect-3.6.7.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- prefect-3.6.7.dist-info/entry_points.txt,sha256=HlY8up83iIq2vU2r33a0qSis4eOFSyb1mRH4l7Xt9X8,126
- prefect-3.6.7.dist-info/licenses/LICENSE,sha256=MCxsn8osAkzfxKC4CC_dLcUkU8DZLkyihZ8mGs3Ah3Q,11357
- prefect-3.6.7.dist-info/RECORD,,
+ prefect-3.6.8.dist-info/METADATA,sha256=5PYAj4eUkiXV0gSWisj1g3yw6i-FDltadp-xyjMhzn4,13654
+ prefect-3.6.8.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ prefect-3.6.8.dist-info/entry_points.txt,sha256=HlY8up83iIq2vU2r33a0qSis4eOFSyb1mRH4l7Xt9X8,126
+ prefect-3.6.8.dist-info/licenses/LICENSE,sha256=MCxsn8osAkzfxKC4CC_dLcUkU8DZLkyihZ8mGs3Ah3Q,11357
+ prefect-3.6.8.dist-info/RECORD,,