prefect-client 2.18.0__py3-none-any.whl → 2.18.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. prefect/_internal/schemas/fields.py +31 -12
  2. prefect/blocks/core.py +1 -1
  3. prefect/blocks/notifications.py +2 -2
  4. prefect/blocks/system.py +2 -3
  5. prefect/client/orchestration.py +283 -22
  6. prefect/client/schemas/sorting.py +9 -0
  7. prefect/client/utilities.py +25 -3
  8. prefect/concurrency/asyncio.py +11 -5
  9. prefect/concurrency/events.py +3 -3
  10. prefect/concurrency/services.py +1 -1
  11. prefect/concurrency/sync.py +9 -5
  12. prefect/deployments/deployments.py +27 -18
  13. prefect/deployments/runner.py +34 -26
  14. prefect/engine.py +3 -1
  15. prefect/events/actions.py +2 -1
  16. prefect/events/cli/automations.py +47 -9
  17. prefect/events/clients.py +50 -18
  18. prefect/events/filters.py +30 -3
  19. prefect/events/instrument.py +40 -40
  20. prefect/events/related.py +2 -1
  21. prefect/events/schemas/automations.py +50 -5
  22. prefect/events/schemas/deployment_triggers.py +15 -227
  23. prefect/events/schemas/events.py +7 -7
  24. prefect/events/utilities.py +1 -1
  25. prefect/events/worker.py +10 -7
  26. prefect/flows.py +33 -18
  27. prefect/input/actions.py +9 -9
  28. prefect/input/run_input.py +49 -37
  29. prefect/new_flow_engine.py +293 -0
  30. prefect/new_task_engine.py +374 -0
  31. prefect/results.py +3 -2
  32. prefect/runner/runner.py +3 -2
  33. prefect/server/api/collections_data/views/aggregate-worker-metadata.json +44 -3
  34. prefect/settings.py +26 -0
  35. prefect/states.py +25 -19
  36. prefect/tasks.py +17 -0
  37. prefect/utilities/asyncutils.py +37 -0
  38. prefect/utilities/engine.py +6 -4
  39. prefect/utilities/schema_tools/validation.py +1 -1
  40. {prefect_client-2.18.0.dist-info → prefect_client-2.18.1.dist-info}/METADATA +1 -1
  41. {prefect_client-2.18.0.dist-info → prefect_client-2.18.1.dist-info}/RECORD +44 -43
  42. prefect/concurrency/common.py +0 -0
  43. {prefect_client-2.18.0.dist-info → prefect_client-2.18.1.dist-info}/LICENSE +0 -0
  44. {prefect_client-2.18.0.dist-info → prefect_client-2.18.1.dist-info}/WHEEL +0 -0
  45. {prefect_client-2.18.0.dist-info → prefect_client-2.18.1.dist-info}/top_level.txt +0 -0

prefect/deployments/deployments.py CHANGED
@@ -16,6 +16,13 @@ import anyio
 import pendulum
 import yaml

+from prefect._internal.pydantic import HAS_PYDANTIC_V2
+
+if HAS_PYDANTIC_V2:
+    from pydantic.v1 import BaseModel, Field, parse_obj_as, root_validator, validator
+else:
+    from pydantic import BaseModel, Field, parse_obj_as, root_validator, validator
+
 from prefect._internal.compatibility.deprecated import (
     DeprecatedInfraOverridesField,
     deprecated_callable,
@@ -23,7 +30,6 @@ from prefect._internal.compatibility.deprecated import (
     deprecated_parameter,
     handle_deprecated_infra_overrides_parameter,
 )
-from prefect._internal.pydantic import HAS_PYDANTIC_V2
 from prefect._internal.schemas.validators import (
     handle_openapi_schema,
     infrastructure_must_have_capabilities,
@@ -32,16 +38,10 @@ from prefect._internal.schemas.validators import (
     validate_automation_names,
     validate_deprecated_schedule_fields,
 )
-from prefect.client.schemas.actions import DeploymentScheduleCreate
-
-if HAS_PYDANTIC_V2:
-    from pydantic.v1 import BaseModel, Field, parse_obj_as, root_validator, validator
-else:
-    from pydantic import BaseModel, Field, parse_obj_as, root_validator, validator
-
 from prefect.blocks.core import Block
 from prefect.blocks.fields import SecretDict
 from prefect.client.orchestration import PrefectClient, get_client
+from prefect.client.schemas.actions import DeploymentScheduleCreate
 from prefect.client.schemas.objects import (
     FlowRun,
     MinimalDeploymentSchedule,
@@ -53,11 +53,12 @@ from prefect.deployments.schedules import (
     FlexibleScheduleList,
 )
 from prefect.deployments.steps.core import run_steps
-from prefect.events import DeploymentTriggerTypes
+from prefect.events import DeploymentTriggerTypes, TriggerTypes
 from prefect.exceptions import (
     BlockMissingCapabilities,
     ObjectAlreadyExists,
     ObjectNotFound,
+    PrefectHTTPStatusError,
 )
 from prefect.filesystems import LocalFileSystem
 from prefect.flows import Flow, load_flow_from_entrypoint
@@ -609,7 +610,7 @@ class Deployment(DeprecatedInfraOverridesField, BaseModel):
         description="The parameter schema of the flow, including defaults.",
     )
     timestamp: datetime = Field(default_factory=partial(pendulum.now, "UTC"))
-    triggers: List[DeploymentTriggerTypes] = Field(
+    triggers: List[Union[DeploymentTriggerTypes, TriggerTypes]] = Field(
         default_factory=list,
         description="The triggers that should cause this deployment to run.",
     )
@@ -902,14 +903,22 @@ class Deployment(DeprecatedInfraOverridesField, BaseModel):
             )

             if client.server_type.supports_automations():
-                # The triggers defined in the deployment spec are, essentially,
-                # anonymous and attempting truly sync them with cloud is not
-                # feasible. Instead, we remove all automations that are owned
-                # by the deployment, meaning that they were created via this
-                # mechanism below, and then recreate them.
-                await client.delete_resource_owned_automations(
-                    f"prefect.deployment.{deployment_id}"
-                )
+                try:
+                    # The triggers defined in the deployment spec are, essentially,
+                    # anonymous and attempting truly sync them with cloud is not
+                    # feasible. Instead, we remove all automations that are owned
+                    # by the deployment, meaning that they were created via this
+                    # mechanism below, and then recreate them.
+                    await client.delete_resource_owned_automations(
+                        f"prefect.deployment.{deployment_id}"
+                    )
+                except PrefectHTTPStatusError as e:
+                    if e.response.status_code == 404:
+                        # This Prefect server does not support automations, so we can safely
+                        # ignore this 404 and move on.
+                        return deployment_id
+                    raise e
+
                 for trigger in self.triggers:
                     trigger.set_deployment_id(deployment_id)
                     await client.create_automation(trigger.as_automation())
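
Both `Deployment.apply` (above) and `RunnerDeployment.apply` (below) now tolerate a 404 from `delete_resource_owned_automations`, treating it as a server that cannot manage automations. The following is a minimal illustrative sketch of the same pattern, not code from the package; the `deployment_id` and `triggers` arguments are assumed to come from surrounding code:

    # Sketch only: mirrors the 404-tolerant automation re-creation added in 2.18.1.
    from prefect.client.orchestration import get_client
    from prefect.exceptions import PrefectHTTPStatusError


    async def recreate_owned_automations(deployment_id, triggers):
        async with get_client() as client:
            try:
                # Remove every automation owned by the deployment before recreating them.
                await client.delete_resource_owned_automations(
                    f"prefect.deployment.{deployment_id}"
                )
            except PrefectHTTPStatusError as e:
                if e.response.status_code == 404:
                    return  # server has no automations API; nothing to recreate
                raise
            for trigger in triggers:
                trigger.set_deployment_id(deployment_id)
                await client.create_automation(trigger.as_automation())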

prefect/deployments/runner.py CHANGED
@@ -42,26 +42,19 @@ from rich.console import Console
 from rich.progress import Progress, SpinnerColumn, TextColumn, track
 from rich.table import Table

-from prefect._internal.concurrency.api import create_call, from_async
 from prefect._internal.pydantic import HAS_PYDANTIC_V2
-from prefect._internal.schemas.validators import (
-    reconcile_paused_deployment,
-    reconcile_schedules_runner,
-    validate_automation_names,
-)
-from prefect.runner.storage import RunnerStorage
-from prefect.settings import (
-    PREFECT_DEFAULT_DOCKER_BUILD_NAMESPACE,
-    PREFECT_DEFAULT_WORK_POOL_NAME,
-    PREFECT_UI_URL,
-)
-from prefect.utilities.collections import get_from_dict, isiterable

 if HAS_PYDANTIC_V2:
     from pydantic.v1 import BaseModel, Field, PrivateAttr, root_validator, validator
 else:
     from pydantic import BaseModel, Field, PrivateAttr, root_validator, validator

+from prefect._internal.concurrency.api import create_call, from_async
+from prefect._internal.schemas.validators import (
+    reconcile_paused_deployment,
+    reconcile_schedules_runner,
+    validate_automation_names,
+)
 from prefect.client.orchestration import get_client
 from prefect.client.schemas.objects import MinimalDeploymentSchedule
 from prefect.client.schemas.schedules import (
@@ -72,13 +65,20 @@ from prefect.deployments.schedules import (
     FlexibleScheduleList,
     create_minimal_deployment_schedule,
 )
-from prefect.events import DeploymentTriggerTypes
+from prefect.events import DeploymentTriggerTypes, TriggerTypes
 from prefect.exceptions import (
     ObjectNotFound,
     PrefectHTTPStatusError,
 )
+from prefect.runner.storage import RunnerStorage
+from prefect.settings import (
+    PREFECT_DEFAULT_DOCKER_BUILD_NAMESPACE,
+    PREFECT_DEFAULT_WORK_POOL_NAME,
+    PREFECT_UI_URL,
+)
 from prefect.utilities.asyncutils import sync_compatible
 from prefect.utilities.callables import ParameterSchema, parameter_schema
+from prefect.utilities.collections import get_from_dict, isiterable
 from prefect.utilities.dockerutils import (
     PushError,
     build_image,
@@ -179,7 +179,7 @@ class RunnerDeployment(BaseModel):
             "The path to the entrypoint for the workflow, relative to the `path`."
         ),
     )
-    triggers: List[DeploymentTriggerTypes] = Field(
+    triggers: List[Union[DeploymentTriggerTypes, TriggerTypes]] = Field(
         default_factory=list,
         description="The triggers that should cause this deployment to run.",
     )
@@ -326,14 +326,22 @@ class RunnerDeployment(BaseModel):
                ) from exc

             if client.server_type.supports_automations():
-                # The triggers defined in the deployment spec are, essentially,
-                # anonymous and attempting truly sync them with cloud is not
-                # feasible. Instead, we remove all automations that are owned
-                # by the deployment, meaning that they were created via this
-                # mechanism below, and then recreate them.
-                await client.delete_resource_owned_automations(
-                    f"prefect.deployment.{deployment_id}"
-                )
+                try:
+                    # The triggers defined in the deployment spec are, essentially,
+                    # anonymous and attempting truly sync them with cloud is not
+                    # feasible. Instead, we remove all automations that are owned
+                    # by the deployment, meaning that they were created via this
+                    # mechanism below, and then recreate them.
+                    await client.delete_resource_owned_automations(
+                        f"prefect.deployment.{deployment_id}"
+                    )
+                except PrefectHTTPStatusError as e:
+                    if e.response.status_code == 404:
+                        # This Prefect server does not support automations, so we can safely
+                        # ignore this 404 and move on.
+                        return deployment_id
+                    raise e
+
                 for trigger in self.triggers:
                     trigger.set_deployment_id(deployment_id)
                     await client.create_automation(trigger.as_automation())
@@ -446,7 +454,7 @@ class RunnerDeployment(BaseModel):
         schedule: Optional[SCHEDULE_TYPES] = None,
         is_schedule_active: Optional[bool] = None,
         parameters: Optional[dict] = None,
-        triggers: Optional[List[DeploymentTriggerTypes]] = None,
+        triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
         version: Optional[str] = None,
@@ -582,7 +590,7 @@ class RunnerDeployment(BaseModel):
         schedule: Optional[SCHEDULE_TYPES] = None,
         is_schedule_active: Optional[bool] = None,
         parameters: Optional[dict] = None,
-        triggers: Optional[List[DeploymentTriggerTypes]] = None,
+        triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
         version: Optional[str] = None,
@@ -680,7 +688,7 @@ class RunnerDeployment(BaseModel):
         schedule: Optional[SCHEDULE_TYPES] = None,
         is_schedule_active: Optional[bool] = None,
         parameters: Optional[dict] = None,
-        triggers: Optional[List[DeploymentTriggerTypes]] = None,
+        triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
         version: Optional[str] = None,

prefect/engine.py CHANGED
@@ -1410,7 +1410,9 @@ def enter_task_run_engine(
         task_runner=task_runner,
     )

-    if task.isasync and flow_run_context.flow.isasync:
+    if task.isasync and (
+        flow_run_context.flow is None or flow_run_context.flow.isasync
+    ):
         # return a coro for the user to await if an async task in an async flow
         return from_async.wait_for_call_in_loop_thread(begin_run)
     else:
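
The new condition short-circuits before touching `flow.isasync`, so a flow-run context whose `flow` attribute is `None` no longer raises `AttributeError`. A toy, standalone sketch of the guard (not Prefect code; the fake classes are made up for illustration):

    from dataclasses import dataclass
    from typing import Optional


    @dataclass
    class FakeFlow:
        isasync: bool


    @dataclass
    class FakeContext:
        flow: Optional[FakeFlow]


    def returns_coroutine(task_is_async: bool, ctx: FakeContext) -> bool:
        # `ctx.flow is None` is evaluated first, mirroring the fix above.
        return task_is_async and (ctx.flow is None or ctx.flow.isasync)


    assert returns_coroutine(True, FakeContext(flow=None))
    assert not returns_coroutine(True, FakeContext(flow=FakeFlow(isasync=False)))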

prefect/events/actions.py CHANGED
@@ -9,7 +9,8 @@ from prefect._internal.pydantic import HAS_PYDANTIC_V2
 if HAS_PYDANTIC_V2:
     from pydantic.v1 import Field, root_validator
 else:
-    from pydantic import Field, root_validator
+    from pydantic import Field, root_validator  # type: ignore
+
 from prefect._internal.schemas.bases import PrefectBaseModel
 from prefect.client.schemas.objects import StateType


prefect/events/cli/automations.py CHANGED
@@ -3,8 +3,10 @@ Command line interface for working with automations.
 """

 import functools
+from typing import Optional

 import orjson
+import typer
 import yaml as pyyaml
 from rich.pretty import Pretty
 from rich.table import Table
@@ -149,15 +151,51 @@ async def pause(id_or_name: str):


 @automations_app.command()
 @requires_automations
-async def delete(id_or_name: str):
-    """Delete an automation."""
-    async with get_client() as client:
-        automation = await client.find_automation(id_or_name)
+async def delete(
+    name: Optional[str] = typer.Argument(None, help="An automation's name"),
+    id: Optional[str] = typer.Option(None, "--id", help="An automation's id"),
+):
+    """Delete an automation.

-        if not automation:
-            exit_with_success(f"Automation {id_or_name!r} not found.")
+    Arguments:
+        name: the name of the automation to delete
+        id: the id of the automation to delete

-    async with get_client() as client:
-        await client.delete_automation(automation.id)
+    Examples:
+        $ prefect automation delete "my-automation"
+        $ prefect automation delete --id "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"
+    """

-    exit_with_success(f"Deleted automation {automation.name!r} ({automation.id})")
+    async with get_client() as client:
+        if not id and not name:
+            exit_with_error("Please provide either a name or an id.")
+
+        if id:
+            automation = await client.read_automation(id)
+            if not automation:
+                exit_with_error(f"Automation with id {id!r} not found.")
+            if not typer.confirm(
+                (f"Are you sure you want to delete automation with id {id!r}?"),
+                default=False,
+            ):
+                exit_with_error("Deletion aborted.")
+            await client.delete_automation(id)
+            exit_with_success(f"Deleted automation with id {id!r}")
+
+        elif name:
+            automation = await client.read_automations_by_name(name=name)
+            if not automation:
+                exit_with_error(
+                    f"Automation {name!r} not found. You can also specify an id with the `--id` flag."
+                )
+            elif len(automation) > 1:
+                exit_with_error(
+                    f"Multiple automations found with name {name!r}. Please specify an id with the `--id` flag instead."
+                )
+            if not typer.confirm(
+                (f"Are you sure you want to delete automation with name {name!r}?"),
+                default=False,
+            ):
+                exit_with_error("Deletion aborted.")
+            await client.delete_automation(automation[0].id)
+            exit_with_success(f"Deleted automation with name {name!r}")
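
For reference, the same lookup-then-delete flow the new command performs can be driven from the Python client using the methods the command calls (`read_automations_by_name`, `delete_automation`). A hedged sketch, assuming an automation named "my-automation" exists on a server that supports automations:

    import asyncio

    from prefect.client.orchestration import get_client


    async def delete_automation_by_name(name: str) -> None:
        async with get_client() as client:
            automations = await client.read_automations_by_name(name=name)
            if not automations:
                raise ValueError(f"Automation {name!r} not found")
            if len(automations) > 1:
                raise ValueError(f"Multiple automations named {name!r}; delete by id instead")
            await client.delete_automation(automations[0].id)


    asyncio.run(delete_automation_by_name("my-automation"))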

prefect/events/clients.py CHANGED
@@ -7,7 +7,7 @@ from typing import (
     ClassVar,
     Dict,
     List,
-    Mapping,
+    MutableMapping,
     Optional,
     Tuple,
     Type,
@@ -19,6 +19,7 @@ import orjson
 import pendulum
 from cachetools import TTLCache
 from typing_extensions import Self
+from websockets import Subprotocol
 from websockets.client import WebSocketClientProtocol, connect
 from websockets.exceptions import (
     ConnectionClosed,
@@ -69,7 +70,7 @@ def get_events_client(


 def get_events_subscriber(
-    filter: "EventFilter" = None,
+    filter: Optional["EventFilter"] = None,
     reconnection_attempts: int = 10,
 ) -> "PrefectEventSubscriber":
     api_url = PREFECT_API_URL.value()
@@ -105,7 +106,7 @@ class EventsClient(abc.ABC):
     async def _emit(self, event: Event) -> None:  # pragma: no cover
         ...

-    async def __aenter__(self) -> "EventsClient":
+    async def __aenter__(self) -> Self:
         self._in_context = True
         return self

@@ -153,7 +154,7 @@ class AssertingEventsClient(EventsClient):
     async def _emit(self, event: Event) -> None:
         self.events.append(event)

-    async def __aenter__(self) -> "AssertingEventsClient":
+    async def __aenter__(self) -> Self:
         await super().__aenter__()
         self.events = []
         return self
@@ -227,7 +228,7 @@ class PrefectEventsClient(EventsClient):

     def __init__(
         self,
-        api_url: str = None,
+        api_url: Optional[str] = None,
         reconnection_attempts: int = 10,
         checkpoint_every: int = 20,
     ):
@@ -343,8 +344,8 @@ class PrefectCloudEventsClient(PrefectEventsClient):

     def __init__(
         self,
-        api_url: str = None,
-        api_key: str = None,
+        api_url: Optional[str] = None,
+        api_key: Optional[str] = None,
         reconnection_attempts: int = 10,
         checkpoint_every: int = 20,
     ):
@@ -393,12 +394,14 @@ class PrefectEventSubscriber:

     _websocket: Optional[WebSocketClientProtocol]
     _filter: "EventFilter"
-    _seen_events: Mapping[UUID, bool]
+    _seen_events: MutableMapping[UUID, bool]
+
+    _api_key: Optional[str]

     def __init__(
         self,
-        api_url: str = None,
-        filter: "EventFilter" = None,
+        api_url: Optional[str] = None,
+        filter: Optional["EventFilter"] = None,
         reconnection_attempts: int = 10,
     ):
         """
@@ -414,7 +417,7 @@ class PrefectEventSubscriber:


         from prefect.events.filters import EventFilter

-        self._filter = filter or EventFilter()
+        self._filter = filter or EventFilter()  # type: ignore[call-arg]
         self._seen_events = TTLCache(maxsize=SEEN_EVENTS_SIZE, ttl=SEEN_EVENTS_TTL)

         socket_url = (
@@ -427,14 +430,14 @@

         self._connect = connect(
             socket_url,
-            subprotocols=["prefect"],
+            subprotocols=[Subprotocol("prefect")],
         )
         self._websocket = None
         self._reconnection_attempts = reconnection_attempts
         if self._reconnection_attempts < 0:
             raise ValueError("reconnection_attempts must be a non-negative integer")

-    async def __aenter__(self) -> "PrefectCloudEventSubscriber":
+    async def __aenter__(self) -> Self:
         # Don't handle any errors in the initial connection, because these are most
         # likely a permission or configuration issue that should propagate
         await self._reconnect()
@@ -498,7 +501,7 @@
         self._websocket = None
         await self._connect.__aexit__(exc_type, exc_val, exc_tb)

-    def __aiter__(self) -> "PrefectCloudEventSubscriber":
+    def __aiter__(self) -> Self:
         return self

     async def __anext__(self) -> Event:
@@ -516,7 +519,7 @@

                 while True:
                     message = orjson.loads(await self._websocket.recv())
-                    event = Event.parse_obj(message["event"])
+                    event: Event = Event.parse_obj(message["event"])

                     if event.id in self._seen_events:
                         continue
@@ -541,14 +544,15 @@
                     # a standard load balancer timeout, but after that, just take a
                     # beat to let things come back around.
                     await asyncio.sleep(1)
+        raise StopAsyncIteration


 class PrefectCloudEventSubscriber(PrefectEventSubscriber):
     def __init__(
         self,
-        api_url: str = None,
-        api_key: str = None,
-        filter: "EventFilter" = None,
+        api_url: Optional[str] = None,
+        api_key: Optional[str] = None,
+        filter: Optional["EventFilter"] = None,
         reconnection_attempts: int = 10,
     ):
         """
@@ -567,3 +571,31 @@ class PrefectCloudEventSubscriber(PrefectEventSubscriber):
         )

         self._api_key = api_key
+
+
+class PrefectCloudAccountEventSubscriber(PrefectCloudEventSubscriber):
+    def __init__(
+        self,
+        api_url: Optional[str] = None,
+        api_key: Optional[str] = None,
+        filter: Optional["EventFilter"] = None,
+        reconnection_attempts: int = 10,
+    ):
+        """
+        Args:
+            api_url: The base URL for a Prefect Cloud workspace
+            api_key: The API of an actor with the manage_events scope
+            reconnection_attempts: When the client is disconnected, how many times
+                the client should attempt to reconnect
+        """
+        api_url, api_key = _get_api_url_and_key(api_url, api_key)
+
+        account_api_url, _, _ = api_url.partition("/workspaces/")
+
+        super().__init__(
+            api_url=account_api_url,
+            filter=filter,
+            reconnection_attempts=reconnection_attempts,
+        )
+
+        self._api_key = api_key
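
A minimal usage sketch for the new `PrefectCloudAccountEventSubscriber` (illustrative only; it assumes `PREFECT_API_URL` and `PREFECT_API_KEY` are configured for a Prefect Cloud workspace so the credentials can be resolved):

    import asyncio

    from prefect.events.clients import PrefectCloudAccountEventSubscriber


    async def main() -> None:
        # With no filter argument, the subscriber falls back to a default EventFilter().
        async with PrefectCloudAccountEventSubscriber() as subscriber:
            async for event in subscriber:
                print(event.occurred, event.event, event.id)


    asyncio.run(main())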

prefect/events/filters.py CHANGED
@@ -13,7 +13,34 @@ from .schemas.events import Event, Resource, ResourceSpecification
 if HAS_PYDANTIC_V2:
     from pydantic.v1 import Field, PrivateAttr
 else:
-    from pydantic import Field, PrivateAttr
+    from pydantic import Field, PrivateAttr  # type: ignore
+
+
+class AutomationFilterCreated(PrefectBaseModel):
+    """Filter by `Automation.created`."""
+
+    before_: Optional[DateTimeTZ] = Field(
+        default=None,
+        description="Only include automations created before this datetime",
+    )
+
+
+class AutomationFilterName(PrefectBaseModel):
+    """Filter by `Automation.created`."""
+
+    any_: Optional[List[str]] = Field(
+        default=None,
+        description="Only include automations with names that match any of these strings",
+    )
+
+
+class AutomationFilter(PrefectBaseModel):
+    name: Optional[AutomationFilterName] = Field(
+        default=None, description="Filter criteria for `Automation.name`"
+    )
+    created: Optional[AutomationFilterCreated] = Field(
+        default=None, description="Filter criteria for `Automation.created`"
+    )


 class EventDataFilter(PrefectBaseModel, extra="forbid"):
@@ -203,7 +230,7 @@ class EventOrder(AutoEnum):

 class EventFilter(EventDataFilter):
     occurred: EventOccurredFilter = Field(
-        default_factory=EventOccurredFilter,
+        default_factory=lambda: EventOccurredFilter(),
         description="Filter criteria for when the events occurred",
     )
     event: Optional[EventNameFilter] = Field(
@@ -220,7 +247,7 @@
         None, description="Filter criteria for the related resources of the event"
     )
     id: EventIDFilter = Field(
-        default_factory=EventIDFilter,
+        default_factory=lambda: EventIDFilter(id=[]),
         description="Filter criteria for the events' ID",
     )

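
An illustrative sketch of composing the new automation filter models (not taken from the package; where the server accepts such a filter is outside the scope of this diff):

    import pendulum

    from prefect.events.filters import (
        AutomationFilter,
        AutomationFilterCreated,
        AutomationFilterName,
    )

    # Match automations named "daily-report" or "cleanup" created more than a week ago.
    automation_filter = AutomationFilter(
        name=AutomationFilterName(any_=["daily-report", "cleanup"]),
        created=AutomationFilterCreated(before_=pendulum.now("UTC").subtract(days=7)),
    )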

prefect/events/instrument.py CHANGED
@@ -10,7 +10,9 @@ from typing import (
     Set,
     Tuple,
     Type,
+    TypeVar,
     Union,
+    cast,
 )

 from prefect.events import emit_event
@@ -41,45 +43,45 @@ def emit_instance_method_called_event(
     )


-def instrument_instance_method_call():
-    def instrument(function):
-        if is_instrumented(function):
-            return function
+F = TypeVar("F", bound=Callable)

-        if inspect.iscoroutinefunction(function):

-            @functools.wraps(function)
-            async def inner(self, *args, **kwargs):
-                success = True
-                try:
-                    return await function(self, *args, **kwargs)
-                except Exception as exc:
-                    success = False
-                    raise exc
-                finally:
-                    emit_instance_method_called_event(
-                        instance=self, method_name=function.__name__, successful=success
-                    )
+def instrument_instance_method_call(function: F) -> F:
+    if is_instrumented(function):
+        return function

-        else:
+    if inspect.iscoroutinefunction(function):

-            @functools.wraps(function)
-            def inner(self, *args, **kwargs):
-                success = True
-                try:
-                    return function(self, *args, **kwargs)
-                except Exception as exc:
-                    success = False
-                    raise exc
-                finally:
-                    emit_instance_method_called_event(
-                        instance=self, method_name=function.__name__, successful=success
-                    )
+        @functools.wraps(function)
+        async def inner(self, *args, **kwargs):
+            success = True
+            try:
+                return await function(self, *args, **kwargs)
+            except Exception as exc:
+                success = False
+                raise exc
+            finally:
+                emit_instance_method_called_event(
+                    instance=self, method_name=function.__name__, successful=success
+                )

-        setattr(inner, "__events_instrumented__", True)
-        return inner
+    else:

-    return instrument
+        @functools.wraps(function)
+        def inner(self, *args, **kwargs):
+            success = True
+            try:
+                return function(self, *args, **kwargs)
+            except Exception as exc:
+                success = False
+                raise exc
+            finally:
+                emit_instance_method_called_event(
+                    instance=self, method_name=function.__name__, successful=success
+                )
+
+    setattr(inner, "__events_instrumented__", True)
+    return cast(F, inner)


 def is_instrumented(function: Callable) -> bool:
@@ -119,17 +121,15 @@ def instrument_method_calls_on_class_instances(cls: Type) -> Type:
     """

     required_events_methods = ["_event_kind", "_event_method_called_resources"]
-    for method in required_events_methods:
-        if not hasattr(cls, method):
+    for method_name in required_events_methods:
+        if not hasattr(cls, method_name):
             raise RuntimeError(
-                f"Unable to instrument class {cls}. Class must define {method!r}."
+                f"Unable to instrument class {cls}. Class must define {method_name!r}."
             )

-    decorator = instrument_instance_method_call()
-
-    for name, method in instrumentable_methods(
+    for method_name, method in instrumentable_methods(
         cls,
         exclude_methods=getattr(cls, "_events_excluded_methods", []),
     ):
-        setattr(cls, name, decorator(method))
+        setattr(cls, method_name, instrument_instance_method_call(method))
     return cls
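
`instrument_instance_method_call` is no longer a decorator factory; it now wraps a function directly and preserves its type via a `TypeVar`. A hypothetical sketch of the new call style (the `Example` class and its `_event_kind` / `_event_method_called_resources` return values are made up for illustration, and the wrapped method is not called here):

    from prefect.events.instrument import instrument_instance_method_call, is_instrumented


    class Example:
        def _event_kind(self) -> str:
            return "prefect.example"  # hypothetical event kind

        def _event_method_called_resources(self):
            return None  # placeholder; real implementations return event resources

        def ping(self) -> str:
            return "pong"


    # 2.18.0 style was instrument_instance_method_call()(Example.ping);
    # 2.18.1 applies the helper directly to the function.
    Example.ping = instrument_instance_method_call(Example.ping)
    assert is_instrumented(Example.ping)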

prefect/events/related.py CHANGED
@@ -57,6 +57,7 @@ async def related_resources_from_run_context(
     exclude: Optional[Set[str]] = None,
 ) -> List[RelatedResource]:
     from prefect.client.orchestration import get_client
+    from prefect.client.schemas.objects import FlowRun
     from prefect.context import FlowRunContext, TaskRunContext

     if exclude is None:
@@ -111,7 +112,7 @@

     flow_run = related_objects[0]["object"]

-    if flow_run:
+    if isinstance(flow_run, FlowRun):
         related_objects += list(
             await asyncio.gather(
                 _get_and_cache_related_object(