prefect-client 3.2.15.dev9__py3-none-any.whl → 3.3.0__py3-none-any.whl

This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in the public registry.
Files changed (56)
  1. prefect/_build_info.py +3 -3
  2. prefect/_internal/compatibility/deprecated.py +28 -23
  3. prefect/_internal/pydantic/v2_schema.py +0 -14
  4. prefect/_internal/schemas/bases.py +6 -3
  5. prefect/_internal/schemas/validators.py +9 -2
  6. prefect/blocks/system.py +7 -6
  7. prefect/client/cloud.py +0 -1
  8. prefect/client/orchestration/__init__.py +0 -1
  9. prefect/client/orchestration/_concurrency_limits/client.py +0 -4
  10. prefect/client/schemas/objects.py +54 -25
  11. prefect/client/schemas/schedules.py +6 -5
  12. prefect/concurrency/_asyncio.py +1 -12
  13. prefect/concurrency/asyncio.py +0 -4
  14. prefect/concurrency/services.py +1 -3
  15. prefect/concurrency/sync.py +1 -6
  16. prefect/context.py +4 -1
  17. prefect/events/clients.py +3 -3
  18. prefect/events/filters.py +7 -2
  19. prefect/events/related.py +5 -3
  20. prefect/events/schemas/events.py +4 -4
  21. prefect/events/utilities.py +43 -32
  22. prefect/exceptions.py +1 -1
  23. prefect/flow_engine.py +2 -11
  24. prefect/futures.py +3 -12
  25. prefect/locking/filesystem.py +3 -2
  26. prefect/logging/formatters.py +1 -1
  27. prefect/logging/handlers.py +2 -2
  28. prefect/main.py +5 -5
  29. prefect/results.py +2 -1
  30. prefect/runner/runner.py +5 -3
  31. prefect/runner/server.py +2 -2
  32. prefect/runtime/flow_run.py +11 -6
  33. prefect/server/api/concurrency_limits_v2.py +12 -8
  34. prefect/server/api/deployments.py +4 -2
  35. prefect/server/api/server.py +2 -2
  36. prefect/server/api/ui/flows.py +7 -2
  37. prefect/server/api/ui/task_runs.py +3 -3
  38. prefect/states.py +10 -35
  39. prefect/task_engine.py +16 -9
  40. prefect/task_worker.py +6 -3
  41. prefect/tasks.py +5 -0
  42. prefect/telemetry/bootstrap.py +3 -1
  43. prefect/telemetry/instrumentation.py +13 -4
  44. prefect/telemetry/logging.py +3 -1
  45. prefect/types/_datetime.py +190 -77
  46. prefect/utilities/collections.py +6 -12
  47. prefect/utilities/dockerutils.py +14 -5
  48. prefect/utilities/engine.py +3 -8
  49. prefect/workers/base.py +15 -10
  50. prefect/workers/server.py +0 -1
  51. {prefect_client-3.2.15.dev9.dist-info → prefect_client-3.3.0.dist-info}/METADATA +7 -4
  52. {prefect_client-3.2.15.dev9.dist-info → prefect_client-3.3.0.dist-info}/RECORD +54 -56
  53. prefect/_internal/pydantic/annotations/__init__.py +0 -0
  54. prefect/_internal/pydantic/annotations/pendulum.py +0 -78
  55. {prefect_client-3.2.15.dev9.dist-info → prefect_client-3.3.0.dist-info}/WHEEL +0 -0
  56. {prefect_client-3.2.15.dev9.dist-info → prefect_client-3.3.0.dist-info}/licenses/LICENSE +0 -0
prefect/context.py CHANGED
@@ -20,6 +20,7 @@ from typing_extensions import Self
  import prefect.logging
  import prefect.logging.configuration
  import prefect.settings
+ import prefect.types._datetime
  from prefect._internal.compatibility.migration import getattr_migration
  from prefect.client.orchestration import PrefectClient, SyncPrefectClient, get_client
  from prefect.client.schemas import FlowRun, TaskRun
@@ -315,7 +316,9 @@ class RunContext(ContextModel):

          start_client_metrics_server()

-     start_time: DateTime = Field(default_factory=lambda: DateTime.now("UTC"))
+     start_time: DateTime = Field(
+         default_factory=lambda: prefect.types._datetime.now("UTC")
+     )
      input_keyset: Optional[dict[str, dict[str, str]]] = None
      client: Union[PrefectClient, SyncPrefectClient]

prefect/events/clients.py CHANGED
@@ -35,6 +35,7 @@ from websockets.exceptions import (
      ConnectionClosedOK,
  )

+ import prefect.types._datetime
  from prefect.events import Event
  from prefect.logging import get_logger
  from prefect.settings import (
@@ -46,7 +47,6 @@ from prefect.settings import (
      PREFECT_DEBUG_MODE,
      PREFECT_SERVER_ALLOW_EPHEMERAL_MODE,
  )
- from prefect.types._datetime import add_years, now

  if TYPE_CHECKING:
      from prefect.events.filters import EventFilter
@@ -665,8 +665,8 @@ class PrefectEventSubscriber:
          from prefect.events.filters import EventOccurredFilter

          self._filter.occurred = EventOccurredFilter(
-             since=now("UTC") - timedelta(minutes=1),
-             until=add_years(now("UTC"), 1),
+             since=prefect.types._datetime.now("UTC") - timedelta(minutes=1),
+             until=prefect.types._datetime.now("UTC") + timedelta(days=365),
          )

          logger.debug(" filtering events since %s...", self._filter.occurred.since)
prefect/events/filters.py CHANGED
@@ -1,8 +1,10 @@
+ import datetime
  from typing import Optional
  from uuid import UUID

  from pydantic import Field

+ import prefect.types._datetime
  from prefect._internal.schemas.bases import PrefectBaseModel
  from prefect.types import DateTime
  from prefect.utilities.collections import AutoEnum
@@ -59,11 +61,14 @@ class EventDataFilter(PrefectBaseModel, extra="forbid"): # type: ignore[call-ar

  class EventOccurredFilter(EventDataFilter):
      since: DateTime = Field(
-         default_factory=lambda: DateTime.now("UTC").start_of("day").subtract(days=180),
+         default_factory=lambda: prefect.types._datetime.start_of_day(
+             prefect.types._datetime.now("UTC")
+         )
+         - datetime.timedelta(days=180),
          description="Only include events after this time (inclusive)",
      )
      until: DateTime = Field(
-         default_factory=lambda: DateTime.now("UTC"),
+         default_factory=lambda: prefect.types._datetime.now("UTC"),
          description="Only include events prior to this time (inclusive)",
      )

prefect/events/related.py CHANGED
@@ -14,7 +14,7 @@ from typing import (
  )
  from uuid import UUID

- from prefect.types._datetime import DateTime, now
+ import prefect.types._datetime

  from .schemas.events import RelatedResource

@@ -23,7 +23,9 @@ if TYPE_CHECKING:
      from prefect.client.orchestration import PrefectClient

  ResourceCacheEntry = Dict[str, Union[str, "ObjectBaseModel", None]]
- RelatedResourceCache = Dict[str, Tuple[ResourceCacheEntry, DateTime]]
+ RelatedResourceCache = Dict[
+     str, Tuple[ResourceCacheEntry, prefect.types._datetime.DateTime]
+ ]

  MAX_CACHE_SIZE = 100
  RESOURCE_CACHE: RelatedResourceCache = {}
@@ -205,7 +207,7 @@ async def _get_and_cache_related_object(
          "object": obj_,
      }

-     cache[cache_key] = (entry, now("UTC"))
+     cache[cache_key] = (entry, prefect.types._datetime.now("UTC"))

      # In the case of a worker or agent this cache could be long-lived. To keep
      # from running out of memory only keep `MAX_CACHE_SIZE` entries in the
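
Note: the datetime changes above (context.py, clients.py, filters.py, related.py) follow one pattern repeated throughout this release: pendulum-style chained calls such as DateTime.now("UTC").start_of("day").subtract(days=180) give way to the helpers in prefect.types._datetime plus plain datetime arithmetic. As a rough, stdlib-only sketch of what the new EventOccurredFilter "since" default computes (the function name default_since is illustrative, not part of the package):

import datetime

def default_since() -> datetime.datetime:
    # current time in UTC, truncated to midnight, minus 180 days
    utc_now = datetime.datetime.now(datetime.timezone.utc)
    midnight = utc_now.replace(hour=0, minute=0, second=0, microsecond=0)
    return midnight - datetime.timedelta(days=180)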
prefect/events/schemas/events.py CHANGED
@@ -24,12 +24,12 @@ from pydantic import (
  )
  from typing_extensions import Annotated, Self

+ import prefect.types._datetime
  from prefect._internal.schemas.bases import PrefectBaseModel
  from prefect.logging import get_logger
  from prefect.settings import (
      PREFECT_EVENTS_MAXIMUM_LABELS_PER_RESOURCE,
  )
- from prefect.types import DateTime

  from .labelling import Labelled

@@ -115,8 +115,8 @@ class Event(PrefectBaseModel):

      model_config: ClassVar[ConfigDict] = ConfigDict(extra="ignore")

-     occurred: DateTime = Field(
-         default_factory=lambda: DateTime.now("UTC"),
+     occurred: prefect.types._datetime.DateTime = Field(
+         default_factory=lambda: prefect.types._datetime.now("UTC"),
          description="When the event happened from the sender's perspective",
      )
      event: str = Field(description="The name of the event that happened")
@@ -184,7 +184,7 @@ class ReceivedEvent(Event):

      model_config: ClassVar[ConfigDict] = ConfigDict(from_attributes=True)

-     received: DateTime = Field(
+     received: prefect.types._datetime.DateTime = Field(
          ...,
          description="When the event was received by Prefect Cloud",
      )
prefect/events/utilities.py CHANGED
@@ -1,10 +1,12 @@
  from __future__ import annotations

+ import datetime
  from datetime import timedelta
- from typing import Any
+ from typing import TYPE_CHECKING, Any
  from uuid import UUID

- from prefect.types import DateTime
+ import prefect.types._datetime
+ from prefect.logging.loggers import get_logger

  from .clients import (
      AssertingEventsClient,
@@ -15,13 +17,18 @@ from .clients import (
  from .schemas.events import Event, RelatedResource
  from .worker import EventsWorker, should_emit_events

+ if TYPE_CHECKING:
+     import logging
+
  TIGHT_TIMING = timedelta(minutes=5)

+ logger: "logging.Logger" = get_logger(__name__)
+

  def emit_event(
      event: str,
      resource: dict[str, str],
-     occurred: DateTime | None = None,
+     occurred: datetime.datetime | None = None,
      related: list[dict[str, str]] | list[RelatedResource] | None = None,
      payload: dict[str, Any] | None = None,
      id: UUID | None = None,
@@ -51,41 +58,45 @@ def emit_event(
      if not should_emit_events():
          return None

-     operational_clients = [
-         AssertingPassthroughEventsClient,
-         AssertingEventsClient,
-         PrefectCloudEventsClient,
-         PrefectEventsClient,
-     ]
-     worker_instance = EventsWorker.instance()
+     try:
+         operational_clients = [
+             AssertingPassthroughEventsClient,
+             AssertingEventsClient,
+             PrefectCloudEventsClient,
+             PrefectEventsClient,
+         ]
+         worker_instance = EventsWorker.instance()

-     if worker_instance.client_type not in operational_clients:
-         return None
+         if worker_instance.client_type not in operational_clients:
+             return None

-     event_kwargs: dict[str, Any] = {
-         "event": event,
-         "resource": resource,
-         **kwargs,
-     }
+         event_kwargs: dict[str, Any] = {
+             "event": event,
+             "resource": resource,
+             **kwargs,
+         }

-     if occurred is None:
-         occurred = DateTime.now("UTC")
-     event_kwargs["occurred"] = occurred
+         if occurred is None:
+             occurred = prefect.types._datetime.now("UTC")
+         event_kwargs["occurred"] = occurred

-     if related is not None:
-         event_kwargs["related"] = related
+         if related is not None:
+             event_kwargs["related"] = related

-     if payload is not None:
-         event_kwargs["payload"] = payload
+         if payload is not None:
+             event_kwargs["payload"] = payload

-     if id is not None:
-         event_kwargs["id"] = id
+         if id is not None:
+             event_kwargs["id"] = id

-     if follows is not None:
-         if -TIGHT_TIMING < (occurred - follows.occurred) < TIGHT_TIMING:
-             event_kwargs["follows"] = follows.id
+         if follows is not None:
+             if -TIGHT_TIMING < (occurred - follows.occurred) < TIGHT_TIMING:
+                 event_kwargs["follows"] = follows.id

-     event_obj = Event(**event_kwargs)
-     worker_instance.send(event_obj)
+         event_obj = Event(**event_kwargs)
+         worker_instance.send(event_obj)

-     return event_obj
+         return event_obj
+     except Exception:
+         logger.exception(f"Error emitting event: {event}")
+         return None
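
Note: emit_event now wraps its body in a try/except, logging failures with logger.exception and returning None rather than raising. A hedged usage sketch; the event name, resource id, and helper function below are invented for illustration, only the emit_event import is real:

from prefect.events import emit_event

def record_order_processed(order_id: str) -> None:
    # with the 3.3.0 behavior shown above, a failure inside emit_event is logged
    # and swallowed, so this instrumentation needs no try/except of its own
    emit_event(
        event="order.processed",
        resource={"prefect.resource.id": f"order.{order_id}"},
        payload={"order_id": order_id},
    )  # returns the Event on success, or None if emission was skipped or failed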
prefect/exceptions.py CHANGED
@@ -8,7 +8,7 @@ from collections.abc import Iterable
  from types import ModuleType, TracebackType
  from typing import TYPE_CHECKING, Any, Callable, Optional

- from httpx._exceptions import HTTPStatusError
+ from httpx import HTTPStatusError
  from pydantic import ValidationError
  from typing_extensions import Self

prefect/flow_engine.py CHANGED
@@ -355,11 +355,7 @@ class FlowRunEngine(BaseFlowRunEngine[P, R]):
          # the State was Prefect-created.
          # TODO: Remove the need to get the result from a State except in cases where the return value
          # is a State object.
-         _result = self.state.result(raise_on_failure=raise_on_failure, fetch=True)  # type: ignore
-         # state.result is a `sync_compatible` function that may or may not return an awaitable
-         # depending on whether the parent frame is sync or not
-         if asyncio.iscoroutine(_result):
-             _result = run_coro_as_sync(_result)
+         _result = self.state.result(raise_on_failure=raise_on_failure, _sync=True)  # type: ignore
          return _result

      def handle_success(self, result: R) -> R:
@@ -924,12 +920,7 @@ class AsyncFlowRunEngine(BaseFlowRunEngine[P, R]):
          # the State was Prefect-created.
          # TODO: Remove the need to get the result from a State except in cases where the return value
          # is a State object.
-         _result = self.state.result(raise_on_failure=raise_on_failure, fetch=True)  # type: ignore
-         # state.result is a `sync_compatible` function that may or may not return an awaitable
-         # depending on whether the parent frame is sync or not
-         if asyncio.iscoroutine(_result):
-             _result = await _result
-         return _result
+         return await self.state.aresult(raise_on_failure=raise_on_failure)  # type: ignore

      async def handle_success(self, result: R) -> R:
          result_store = getattr(FlowRunContext.get(), "result_store", None)
prefect/futures.py CHANGED
@@ -1,7 +1,6 @@
  from __future__ import annotations

  import abc
- import asyncio
  import concurrent.futures
  import threading
  import uuid
@@ -221,12 +220,8 @@ class PrefectConcurrentFuture(PrefectWrappedFuture[R, concurrent.futures.Future[
              return future_result

          _result = self._final_state.result(
-             raise_on_failure=raise_on_failure, fetch=True
+             raise_on_failure=raise_on_failure, _sync=True
          )
-         # state.result is a `sync_compatible` function that may or may not return an awaitable
-         # depending on whether the parent frame is sync or not
-         if asyncio.iscoroutine(_result):
-             _result = run_coro_as_sync(_result)
          return _result

      def __del__(self) -> None:
@@ -316,9 +311,7 @@ class PrefectDistributedFuture(PrefectTaskRunFuture[R]):
                  f"Task run {self.task_run_id} did not complete within {timeout} seconds"
              )

-         return await self._final_state.result(
-             raise_on_failure=raise_on_failure, fetch=True
-         )
+         return await self._final_state.aresult(raise_on_failure=raise_on_failure)

      def add_done_callback(self, fn: Callable[[PrefectFuture[R]], None]) -> None:
          if self._final_state:
@@ -433,9 +426,7 @@ class PrefectFlowRunFuture(PrefectFuture[R]):
                  f"Task run {self.task_run_id} did not complete within {timeout} seconds"
              )

-         return await self._final_state.result(
-             raise_on_failure=raise_on_failure, fetch=True
-         )
+         return await self._final_state.aresult(raise_on_failure=raise_on_failure)

      def add_done_callback(self, fn: Callable[[PrefectFuture[R]], None]) -> None:
          if self._final_state:
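
Note: together with the flow_engine.py changes above, result retrieval no longer calls state.result(fetch=True) and then checks whether it got a coroutine back; synchronous paths pass _sync=True and asynchronous paths use the aresult accessor. A minimal sketch of the two call paths, assuming a prefect.client.schemas.objects.State instance (these are the internal accessors used above, not a public-API guide):

from prefect.client.schemas.objects import State

def read_result_sync(state: State) -> object:
    # synchronous path: resolve the stored result eagerly
    return state.result(raise_on_failure=True, _sync=True)

async def read_result_async(state: State) -> object:
    # asynchronous path: await the dedicated async accessor
    return await state.aresult(raise_on_failure=True)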
prefect/locking/filesystem.py CHANGED
@@ -1,3 +1,4 @@
+ import datetime
  import time
  from datetime import timedelta
  from logging import Logger
@@ -9,7 +10,7 @@ import pydantic_core
  from typing_extensions import TypedDict

  from prefect.logging.loggers import get_logger
- from prefect.types._datetime import DateTime, now, parse_datetime
+ from prefect.types._datetime import now, parse_datetime

  from .protocol import LockManager

@@ -27,7 +28,7 @@ class _LockInfo(TypedDict):
      """

      holder: str
-     expiration: Optional[DateTime]
+     expiration: Optional[datetime.datetime]
      path: Path


prefect/logging/formatters.py CHANGED
@@ -1,6 +1,6 @@
  from __future__ import annotations

- import logging.handlers
+ import logging
  import sys
  import traceback
  from types import TracebackType
prefect/logging/handlers.py CHANGED
@@ -236,7 +236,7 @@ class APILogHandler(logging.Handler):
              worker_id=worker_id,
              name=record.name,
              level=record.levelno,
-             timestamp=from_timestamp(getattr(record, "created", None) or time.time()),
+             timestamp=from_timestamp(getattr(record, "created", None) or time.time()),  # pyright: ignore[reportArgumentType] DateTime is split into two types depending on Python version
              message=self.format(record),
          ).model_dump(mode="json")

@@ -277,7 +277,7 @@ class WorkerAPILogHandler(APILogHandler):
              worker_id=worker_id,
              name=record.name,
              level=record.levelno,
-             timestamp=from_timestamp(getattr(record, "created", None) or time.time()),
+             timestamp=from_timestamp(getattr(record, "created", None) or time.time()),  # pyright: ignore[reportArgumentType] DateTime is split into two types depending on Python version
              message=self.format(record),
          ).model_dump(mode="json")

prefect/main.py CHANGED
@@ -11,13 +11,13 @@ from prefect._result_records import ResultRecordMetadata
  from prefect.flow_runs import pause_flow_run, resume_flow_run, suspend_flow_run
  from prefect.client.orchestration import get_client
  from prefect.client.cloud import get_cloud_client
- import prefect.variables
- import prefect.runtime
+ import prefect.variables  # pyright: ignore[reportUnusedImport] # TODO: Does this need to be imported here?
+ import prefect.runtime  # pyright: ignore[reportUnusedImport] # TODO: Does this need to be imported here?

  # Import modules that register types
- import prefect.serializers
- import prefect.blocks.notifications
- import prefect.blocks.system
+ import prefect.serializers  # pyright: ignore[reportUnusedImport]
+ import prefect.blocks.notifications  # pyright: ignore[reportUnusedImport]
+ import prefect.blocks.system  # pyright: ignore[reportUnusedImport]

  # Initialize the process-wide profile and registry at import time
  import prefect.context
prefect/results.py CHANGED
@@ -31,6 +31,7 @@ from pydantic import (
  )
  from typing_extensions import ParamSpec, Self
  import prefect
+ import prefect.types._datetime
  from prefect._internal.compatibility.async_dispatch import async_dispatch
  from prefect._result_records import R, ResultRecord, ResultRecordMetadata
  from prefect.blocks.core import Block
@@ -519,7 +520,7 @@ class ResultStore(BaseModel):
          if metadata.expiration:
              # if the result has an expiration,
              # check if it is still in the future
-             exists = metadata.expiration > DateTime.now("utc")
+             exists = metadata.expiration > prefect.types._datetime.now("UTC")
          else:
              exists = True
          return exists
prefect/runner/runner.py CHANGED
@@ -113,7 +113,7 @@ from prefect.states import (
      Pending,
      exception_to_failed_state,
  )
- from prefect.types._datetime import DateTime
+ from prefect.types._datetime import now
  from prefect.types.entrypoint import EntrypointType
  from prefect.utilities.annotations import NotSet
  from prefect.utilities.asyncutils import (
@@ -992,7 +992,7 @@ class Runner:
          if self.stopping:
              return
          runs_response = await self._get_scheduled_flow_runs()
-         self.last_polled: DateTime = DateTime.now("UTC")
+         self.last_polled: datetime.datetime = now("UTC")
          return await self._submit_scheduled_flow_runs(flow_run_response=runs_response)

      async def _check_for_cancelled_flow_runs(
@@ -1258,7 +1258,9 @@ class Runner:
          """
          Retrieve scheduled flow runs for this runner.
          """
-         scheduled_before = DateTime.now("utc").add(seconds=int(self._prefetch_seconds))
+         scheduled_before = now("UTC") + datetime.timedelta(
+             seconds=int(self._prefetch_seconds)
+         )
          self._logger.debug(
              f"Querying for flow runs scheduled before {scheduled_before}"
          )
prefect/runner/server.py CHANGED
@@ -23,7 +23,7 @@ from prefect.settings import (
      PREFECT_RUNNER_SERVER_MISSED_POLLS_TOLERANCE,
      PREFECT_RUNNER_SERVER_PORT,
  )
- from prefect.types._datetime import DateTime
+ from prefect.types._datetime import now as now_fn
  from prefect.utilities.asyncutils import run_coro_as_sync
  from prefect.utilities.importtools import load_script_as_module

@@ -56,7 +56,7 @@ def perform_health_check(
          )

      def _health_check():
-         now = DateTime.now("utc")
+         now = now_fn("UTC")
          poll_delay = (now - runner.last_polled).total_seconds()

          if TYPE_CHECKING:
prefect/runtime/flow_run.py CHANGED
@@ -25,12 +25,13 @@ from __future__ import annotations
  import os
  from datetime import datetime
  from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional
+ from zoneinfo import ZoneInfo

  from prefect._internal.concurrency.api import create_call, from_sync
  from prefect.client.orchestration import get_client
  from prefect.context import FlowRunContext, TaskRunContext
  from prefect.settings import PREFECT_API_URL, PREFECT_UI_URL
- from prefect.types._datetime import DateTime, Timezone, now, parse_datetime
+ from prefect.types._datetime import DateTime, now, parse_datetime

  if TYPE_CHECKING:
      from prefect.client.schemas.objects import Flow, FlowRun, TaskRun
@@ -53,10 +54,14 @@ __all__ = [
  ]


- def _parse_datetime_UTC(dt: str) -> DateTime:
-     pendulum_dt = parse_datetime(dt, tz=Timezone("UTC"), strict=False)
-     assert isinstance(pendulum_dt, datetime)
-     return DateTime.instance(pendulum_dt)
+ def _parse_datetime_UTC(dt: str) -> datetime:
+     parsed_dt = parse_datetime(dt)
+     if parsed_dt.tzinfo is None:
+         # if the datetime is naive, assume it is UTC
+         return parsed_dt.replace(tzinfo=ZoneInfo("UTC"))
+     else:
+         # if the datetime is timezone-aware, convert to UTC
+         return parsed_dt.astimezone(ZoneInfo("UTC"))


  type_cast: dict[
@@ -67,7 +72,7 @@ type_cast: dict[
      int: int,
      float: float,
      str: str,
-     DateTime: _parse_datetime_UTC,
+     datetime: _parse_datetime_UTC,
      # for optional defined attributes, when real value is NoneType, use str
      type(None): str,
  }
prefect/server/api/concurrency_limits_v2.py CHANGED
@@ -156,15 +156,18 @@ async def bulk_increment_active_slots(
      slots: int = Body(..., gt=0),
      names: List[str] = Body(..., min_items=1),
      mode: Literal["concurrency", "rate_limit"] = Body("concurrency"),
-     create_if_missing: Optional[bool] = Body(None),
+     create_if_missing: Optional[bool] = Body(
+         None,
+         deprecated="Limits must be explicitly created before acquiring concurrency slots.",
+     ),
      db: PrefectDBInterface = Depends(provide_database_interface),
  ) -> List[MinimalConcurrencyLimitResponse]:
      async with db.session_context(begin_transaction=True) as session:
          limits = [
              schemas.core.ConcurrencyLimitV2.model_validate(limit)
              for limit in (
-                 await models.concurrency_limits_v2.bulk_read_or_create_concurrency_limits(
-                     session=session, names=names, create_if_missing=create_if_missing
+                 await models.concurrency_limits_v2.bulk_read_concurrency_limits(
+                     session=session, names=names
                  )
              )
          ]
@@ -246,14 +249,15 @@ async def bulk_decrement_active_slots(
      slots: int = Body(..., gt=0),
      names: List[str] = Body(..., min_items=1),
      occupancy_seconds: Optional[float] = Body(None, gt=0.0),
-     create_if_missing: bool = Body(True),
+     create_if_missing: bool = Body(
+         None,
+         deprecated="Limits must be explicitly created before decrementing active slots.",
+     ),
      db: PrefectDBInterface = Depends(provide_database_interface),
  ) -> List[MinimalConcurrencyLimitResponse]:
      async with db.session_context(begin_transaction=True) as session:
-         limits = (
-             await models.concurrency_limits_v2.bulk_read_or_create_concurrency_limits(
-                 session=session, names=names, create_if_missing=create_if_missing
-             )
+         limits = await models.concurrency_limits_v2.bulk_read_concurrency_limits(
+             session=session, names=names
          )

          if not limits:
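
Note: both bulk endpoints above now mark create_if_missing as deprecated and only read existing limits via bulk_read_concurrency_limits, so a v2 (global) concurrency limit must exist before slots are acquired or released. A hedged usage sketch; the limit name "database" and the work inside the block are illustrative, and the limit is assumed to have been created beforehand (for example in the UI or with the prefect gcl create CLI):

from prefect.concurrency.sync import concurrency

def write_batch(rows: list[dict[str, str]]) -> None:
    # occupies one slot on the pre-created "database" limit while the block runs
    with concurrency("database", occupy=1):
        for row in rows:
            ...  # rate-limited work goes here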
prefect/server/api/deployments.py CHANGED
@@ -568,8 +568,10 @@ async def delete_deployment(
  @router.post("/{id}/schedule")
  async def schedule_deployment(
      deployment_id: UUID = Path(..., description="The deployment id", alias="id"),
-     start_time: DateTime = Body(None, description="The earliest date to schedule"),
-     end_time: DateTime = Body(None, description="The latest date to schedule"),
+     start_time: datetime.datetime = Body(
+         None, description="The earliest date to schedule"
+     ),
+     end_time: datetime.datetime = Body(None, description="The latest date to schedule"),
      # Workaround for the fact that FastAPI does not let us configure ser_json_timedelta
      # to represent timedeltas as floats in JSON.
      min_time: float = Body(
prefect/server/api/server.py CHANGED
@@ -253,11 +253,11 @@ def copy_directory(directory: str, path: str) -> None:
              # ensure copied files are writeable
              for root, dirs, files in os.walk(destination):
                  for f in files:
-                     os.chmod(os.path.join(root, f), 0o600)
+                     os.chmod(os.path.join(root, f), 0o700)
          else:
              shutil.copy2(source, destination)
              # Ensure copied file is writeable
-             os.chmod(destination, 0o600)
+             os.chmod(destination, 0o700)


  async def custom_internal_exception_handler(
prefect/server/api/ui/flows.py CHANGED
@@ -3,6 +3,7 @@ from __future__ import annotations
  from datetime import datetime
  from typing import TYPE_CHECKING, Dict, List, Optional
  from uuid import UUID
+ from zoneinfo import ZoneInfo

  import sqlalchemy as sa
  from fastapi import Body, Depends
@@ -15,7 +16,7 @@ from prefect.server.utilities.database import UUID as UUIDTypeDecorator
  from prefect.server.utilities.schemas import PrefectBaseModel
  from prefect.server.utilities.server import PrefectRouter
  from prefect.types import DateTime
- from prefect.types._datetime import create_datetime_instance
+ from prefect.types._datetime import create_datetime_instance, parse_datetime

  if TYPE_CHECKING:
      import logging
@@ -162,7 +163,11 @@ async def next_runs_by_flow(
              name=result.name,
              state_name=result.state_name,
              state_type=result.state_type,
-             next_scheduled_start_time=result.next_scheduled_start_time,
+             next_scheduled_start_time=parse_datetime(
+                 result.next_scheduled_start_time
+             ).replace(tzinfo=ZoneInfo("UTC"))
+             if isinstance(result.next_scheduled_start_time, str)
+             else result.next_scheduled_start_time,
          )
          for result in results.all()
      }
prefect/server/api/ui/task_runs.py CHANGED
@@ -62,14 +62,14 @@ async def read_dashboard_task_run_counts(
      )

      bucket_count = 20
-     start_time = task_runs.start_time.after_.start_of("minute")
+     start_time = task_runs.start_time.after_.replace(microsecond=0, second=0)
      end_time = (
          end_of_period(task_runs.start_time.before_, "minute")
          if task_runs.start_time.before_
          else end_of_period(now("UTC"), "minute")
      )
      window = end_time - start_time
-     delta = window.as_timedelta() / bucket_count
+     delta = window / bucket_count

      async with db.session_context(begin_transaction=False) as session:
          # Gather the raw counts. The counts are divided into buckets of time
@@ -85,7 +85,7 @@ async def read_dashboard_task_run_counts(
              start_time.minute,
              start_time.second,
              start_time.microsecond,
-             start_time.timezone,
+             start_time.tzinfo,
          )
          bucket_expression = sa.func.floor(
              sa.func.date_diff_seconds(db.TaskRun.start_time, start_datetime)
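
Note: with start_time handled as a plain datetime above, the window is already a datetime.timedelta, which is why window.as_timedelta() collapses to window. A stdlib-only sketch of the same bucketing arithmetic (the timestamps and the one-hour window are made up):

import datetime

start_time = datetime.datetime(2025, 3, 1, 12, 30, 45, tzinfo=datetime.timezone.utc)
start_time = start_time.replace(second=0, microsecond=0)  # truncate to the minute
end_time = start_time + datetime.timedelta(hours=1)

bucket_count = 20
delta = (end_time - start_time) / bucket_count  # timedelta / int -> timedelta
assert delta == datetime.timedelta(minutes=3)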