prefect-client 2.19.3__py3-none-any.whl → 3.0.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (239)
  1. prefect/__init__.py +8 -56
  2. prefect/_internal/compatibility/deprecated.py +6 -115
  3. prefect/_internal/compatibility/experimental.py +4 -79
  4. prefect/_internal/concurrency/api.py +0 -34
  5. prefect/_internal/concurrency/calls.py +0 -6
  6. prefect/_internal/concurrency/cancellation.py +0 -3
  7. prefect/_internal/concurrency/event_loop.py +0 -20
  8. prefect/_internal/concurrency/inspection.py +3 -3
  9. prefect/_internal/concurrency/threads.py +35 -0
  10. prefect/_internal/concurrency/waiters.py +0 -28
  11. prefect/_internal/pydantic/__init__.py +0 -45
  12. prefect/_internal/pydantic/v1_schema.py +21 -22
  13. prefect/_internal/pydantic/v2_schema.py +0 -2
  14. prefect/_internal/pydantic/v2_validated_func.py +18 -23
  15. prefect/_internal/schemas/bases.py +44 -177
  16. prefect/_internal/schemas/fields.py +1 -43
  17. prefect/_internal/schemas/validators.py +60 -158
  18. prefect/artifacts.py +161 -14
  19. prefect/automations.py +39 -4
  20. prefect/blocks/abstract.py +1 -1
  21. prefect/blocks/core.py +268 -148
  22. prefect/blocks/fields.py +2 -57
  23. prefect/blocks/kubernetes.py +8 -12
  24. prefect/blocks/notifications.py +40 -20
  25. prefect/blocks/system.py +22 -11
  26. prefect/blocks/webhook.py +2 -9
  27. prefect/client/base.py +4 -4
  28. prefect/client/cloud.py +8 -13
  29. prefect/client/orchestration.py +347 -341
  30. prefect/client/schemas/actions.py +92 -86
  31. prefect/client/schemas/filters.py +20 -40
  32. prefect/client/schemas/objects.py +147 -145
  33. prefect/client/schemas/responses.py +16 -24
  34. prefect/client/schemas/schedules.py +47 -35
  35. prefect/client/subscriptions.py +2 -2
  36. prefect/client/utilities.py +5 -2
  37. prefect/concurrency/asyncio.py +3 -1
  38. prefect/concurrency/events.py +1 -1
  39. prefect/concurrency/services.py +6 -3
  40. prefect/context.py +195 -27
  41. prefect/deployments/__init__.py +5 -6
  42. prefect/deployments/base.py +7 -5
  43. prefect/deployments/flow_runs.py +185 -0
  44. prefect/deployments/runner.py +50 -45
  45. prefect/deployments/schedules.py +28 -23
  46. prefect/deployments/steps/__init__.py +0 -1
  47. prefect/deployments/steps/core.py +1 -0
  48. prefect/deployments/steps/pull.py +7 -21
  49. prefect/engine.py +12 -2422
  50. prefect/events/actions.py +17 -23
  51. prefect/events/cli/automations.py +19 -6
  52. prefect/events/clients.py +14 -37
  53. prefect/events/filters.py +14 -18
  54. prefect/events/related.py +2 -2
  55. prefect/events/schemas/__init__.py +0 -5
  56. prefect/events/schemas/automations.py +55 -46
  57. prefect/events/schemas/deployment_triggers.py +7 -197
  58. prefect/events/schemas/events.py +34 -65
  59. prefect/events/schemas/labelling.py +10 -14
  60. prefect/events/utilities.py +2 -3
  61. prefect/events/worker.py +2 -3
  62. prefect/filesystems.py +6 -517
  63. prefect/{new_flow_engine.py → flow_engine.py} +313 -72
  64. prefect/flow_runs.py +377 -5
  65. prefect/flows.py +248 -165
  66. prefect/futures.py +186 -345
  67. prefect/infrastructure/__init__.py +0 -27
  68. prefect/infrastructure/provisioners/__init__.py +5 -3
  69. prefect/infrastructure/provisioners/cloud_run.py +11 -6
  70. prefect/infrastructure/provisioners/container_instance.py +11 -7
  71. prefect/infrastructure/provisioners/ecs.py +6 -4
  72. prefect/infrastructure/provisioners/modal.py +8 -5
  73. prefect/input/actions.py +2 -4
  74. prefect/input/run_input.py +5 -7
  75. prefect/logging/formatters.py +0 -2
  76. prefect/logging/handlers.py +3 -11
  77. prefect/logging/loggers.py +2 -2
  78. prefect/manifests.py +2 -1
  79. prefect/records/__init__.py +1 -0
  80. prefect/records/result_store.py +42 -0
  81. prefect/records/store.py +9 -0
  82. prefect/results.py +43 -39
  83. prefect/runner/runner.py +9 -9
  84. prefect/runner/server.py +6 -10
  85. prefect/runner/storage.py +3 -8
  86. prefect/runner/submit.py +2 -2
  87. prefect/runner/utils.py +2 -2
  88. prefect/serializers.py +24 -35
  89. prefect/server/api/collections_data/views/aggregate-worker-metadata.json +5 -14
  90. prefect/settings.py +70 -133
  91. prefect/states.py +17 -47
  92. prefect/task_engine.py +697 -58
  93. prefect/task_runners.py +269 -301
  94. prefect/task_server.py +53 -34
  95. prefect/tasks.py +327 -337
  96. prefect/transactions.py +220 -0
  97. prefect/types/__init__.py +61 -82
  98. prefect/utilities/asyncutils.py +195 -136
  99. prefect/utilities/callables.py +121 -41
  100. prefect/utilities/collections.py +23 -38
  101. prefect/utilities/dispatch.py +11 -3
  102. prefect/utilities/dockerutils.py +4 -0
  103. prefect/utilities/engine.py +140 -20
  104. prefect/utilities/importtools.py +26 -27
  105. prefect/utilities/pydantic.py +128 -38
  106. prefect/utilities/schema_tools/hydration.py +5 -1
  107. prefect/utilities/templating.py +12 -2
  108. prefect/variables.py +78 -61
  109. prefect/workers/__init__.py +0 -1
  110. prefect/workers/base.py +15 -17
  111. prefect/workers/process.py +3 -8
  112. prefect/workers/server.py +2 -2
  113. {prefect_client-2.19.3.dist-info → prefect_client-3.0.0rc1.dist-info}/METADATA +22 -21
  114. prefect_client-3.0.0rc1.dist-info/RECORD +176 -0
  115. prefect/_internal/pydantic/_base_model.py +0 -51
  116. prefect/_internal/pydantic/_compat.py +0 -82
  117. prefect/_internal/pydantic/_flags.py +0 -20
  118. prefect/_internal/pydantic/_types.py +0 -8
  119. prefect/_internal/pydantic/utilities/__init__.py +0 -0
  120. prefect/_internal/pydantic/utilities/config_dict.py +0 -72
  121. prefect/_internal/pydantic/utilities/field_validator.py +0 -150
  122. prefect/_internal/pydantic/utilities/model_construct.py +0 -56
  123. prefect/_internal/pydantic/utilities/model_copy.py +0 -55
  124. prefect/_internal/pydantic/utilities/model_dump.py +0 -136
  125. prefect/_internal/pydantic/utilities/model_dump_json.py +0 -112
  126. prefect/_internal/pydantic/utilities/model_fields.py +0 -50
  127. prefect/_internal/pydantic/utilities/model_fields_set.py +0 -29
  128. prefect/_internal/pydantic/utilities/model_json_schema.py +0 -82
  129. prefect/_internal/pydantic/utilities/model_rebuild.py +0 -80
  130. prefect/_internal/pydantic/utilities/model_validate.py +0 -75
  131. prefect/_internal/pydantic/utilities/model_validate_json.py +0 -68
  132. prefect/_internal/pydantic/utilities/model_validator.py +0 -87
  133. prefect/_internal/pydantic/utilities/type_adapter.py +0 -71
  134. prefect/_vendor/__init__.py +0 -0
  135. prefect/_vendor/fastapi/__init__.py +0 -25
  136. prefect/_vendor/fastapi/applications.py +0 -946
  137. prefect/_vendor/fastapi/background.py +0 -3
  138. prefect/_vendor/fastapi/concurrency.py +0 -44
  139. prefect/_vendor/fastapi/datastructures.py +0 -58
  140. prefect/_vendor/fastapi/dependencies/__init__.py +0 -0
  141. prefect/_vendor/fastapi/dependencies/models.py +0 -64
  142. prefect/_vendor/fastapi/dependencies/utils.py +0 -877
  143. prefect/_vendor/fastapi/encoders.py +0 -177
  144. prefect/_vendor/fastapi/exception_handlers.py +0 -40
  145. prefect/_vendor/fastapi/exceptions.py +0 -46
  146. prefect/_vendor/fastapi/logger.py +0 -3
  147. prefect/_vendor/fastapi/middleware/__init__.py +0 -1
  148. prefect/_vendor/fastapi/middleware/asyncexitstack.py +0 -25
  149. prefect/_vendor/fastapi/middleware/cors.py +0 -3
  150. prefect/_vendor/fastapi/middleware/gzip.py +0 -3
  151. prefect/_vendor/fastapi/middleware/httpsredirect.py +0 -3
  152. prefect/_vendor/fastapi/middleware/trustedhost.py +0 -3
  153. prefect/_vendor/fastapi/middleware/wsgi.py +0 -3
  154. prefect/_vendor/fastapi/openapi/__init__.py +0 -0
  155. prefect/_vendor/fastapi/openapi/constants.py +0 -2
  156. prefect/_vendor/fastapi/openapi/docs.py +0 -203
  157. prefect/_vendor/fastapi/openapi/models.py +0 -480
  158. prefect/_vendor/fastapi/openapi/utils.py +0 -485
  159. prefect/_vendor/fastapi/param_functions.py +0 -340
  160. prefect/_vendor/fastapi/params.py +0 -453
  161. prefect/_vendor/fastapi/requests.py +0 -4
  162. prefect/_vendor/fastapi/responses.py +0 -40
  163. prefect/_vendor/fastapi/routing.py +0 -1331
  164. prefect/_vendor/fastapi/security/__init__.py +0 -15
  165. prefect/_vendor/fastapi/security/api_key.py +0 -98
  166. prefect/_vendor/fastapi/security/base.py +0 -6
  167. prefect/_vendor/fastapi/security/http.py +0 -172
  168. prefect/_vendor/fastapi/security/oauth2.py +0 -227
  169. prefect/_vendor/fastapi/security/open_id_connect_url.py +0 -34
  170. prefect/_vendor/fastapi/security/utils.py +0 -10
  171. prefect/_vendor/fastapi/staticfiles.py +0 -1
  172. prefect/_vendor/fastapi/templating.py +0 -3
  173. prefect/_vendor/fastapi/testclient.py +0 -1
  174. prefect/_vendor/fastapi/types.py +0 -3
  175. prefect/_vendor/fastapi/utils.py +0 -235
  176. prefect/_vendor/fastapi/websockets.py +0 -7
  177. prefect/_vendor/starlette/__init__.py +0 -1
  178. prefect/_vendor/starlette/_compat.py +0 -28
  179. prefect/_vendor/starlette/_exception_handler.py +0 -80
  180. prefect/_vendor/starlette/_utils.py +0 -88
  181. prefect/_vendor/starlette/applications.py +0 -261
  182. prefect/_vendor/starlette/authentication.py +0 -159
  183. prefect/_vendor/starlette/background.py +0 -43
  184. prefect/_vendor/starlette/concurrency.py +0 -59
  185. prefect/_vendor/starlette/config.py +0 -151
  186. prefect/_vendor/starlette/convertors.py +0 -87
  187. prefect/_vendor/starlette/datastructures.py +0 -707
  188. prefect/_vendor/starlette/endpoints.py +0 -130
  189. prefect/_vendor/starlette/exceptions.py +0 -60
  190. prefect/_vendor/starlette/formparsers.py +0 -276
  191. prefect/_vendor/starlette/middleware/__init__.py +0 -17
  192. prefect/_vendor/starlette/middleware/authentication.py +0 -52
  193. prefect/_vendor/starlette/middleware/base.py +0 -220
  194. prefect/_vendor/starlette/middleware/cors.py +0 -176
  195. prefect/_vendor/starlette/middleware/errors.py +0 -265
  196. prefect/_vendor/starlette/middleware/exceptions.py +0 -74
  197. prefect/_vendor/starlette/middleware/gzip.py +0 -113
  198. prefect/_vendor/starlette/middleware/httpsredirect.py +0 -19
  199. prefect/_vendor/starlette/middleware/sessions.py +0 -82
  200. prefect/_vendor/starlette/middleware/trustedhost.py +0 -64
  201. prefect/_vendor/starlette/middleware/wsgi.py +0 -147
  202. prefect/_vendor/starlette/requests.py +0 -328
  203. prefect/_vendor/starlette/responses.py +0 -347
  204. prefect/_vendor/starlette/routing.py +0 -933
  205. prefect/_vendor/starlette/schemas.py +0 -154
  206. prefect/_vendor/starlette/staticfiles.py +0 -248
  207. prefect/_vendor/starlette/status.py +0 -199
  208. prefect/_vendor/starlette/templating.py +0 -231
  209. prefect/_vendor/starlette/testclient.py +0 -804
  210. prefect/_vendor/starlette/types.py +0 -30
  211. prefect/_vendor/starlette/websockets.py +0 -193
  212. prefect/agent.py +0 -698
  213. prefect/deployments/deployments.py +0 -1042
  214. prefect/deprecated/__init__.py +0 -0
  215. prefect/deprecated/data_documents.py +0 -350
  216. prefect/deprecated/packaging/__init__.py +0 -12
  217. prefect/deprecated/packaging/base.py +0 -96
  218. prefect/deprecated/packaging/docker.py +0 -146
  219. prefect/deprecated/packaging/file.py +0 -92
  220. prefect/deprecated/packaging/orion.py +0 -80
  221. prefect/deprecated/packaging/serializers.py +0 -171
  222. prefect/events/instrument.py +0 -135
  223. prefect/infrastructure/base.py +0 -323
  224. prefect/infrastructure/container.py +0 -818
  225. prefect/infrastructure/kubernetes.py +0 -920
  226. prefect/infrastructure/process.py +0 -289
  227. prefect/new_task_engine.py +0 -423
  228. prefect/pydantic/__init__.py +0 -76
  229. prefect/pydantic/main.py +0 -39
  230. prefect/software/__init__.py +0 -2
  231. prefect/software/base.py +0 -50
  232. prefect/software/conda.py +0 -199
  233. prefect/software/pip.py +0 -122
  234. prefect/software/python.py +0 -52
  235. prefect/workers/block.py +0 -218
  236. prefect_client-2.19.3.dist-info/RECORD +0 -292
  237. {prefect_client-2.19.3.dist-info → prefect_client-3.0.0rc1.dist-info}/LICENSE +0 -0
  238. {prefect_client-2.19.3.dist-info → prefect_client-3.0.0rc1.dist-info}/WHEEL +0 -0
  239. {prefect_client-2.19.3.dist-info → prefect_client-3.0.0rc1.dist-info}/top_level.txt +0 -0
@@ -8,7 +8,6 @@ from typing import (
8
8
  Dict,
9
9
  Iterable,
10
10
  List,
11
- NoReturn,
12
11
  Optional,
13
12
  Set,
14
13
  Tuple,
@@ -21,35 +20,20 @@ import certifi
21
20
  import httpcore
22
21
  import httpx
23
22
  import pendulum
24
- from typing_extensions import ParamSpec
25
-
26
- from prefect._internal.compatibility.deprecated import (
27
- handle_deprecated_infra_overrides_parameter,
28
- )
29
- from prefect._internal.pydantic import HAS_PYDANTIC_V2
30
- from prefect.client.schemas import sorting
31
- from prefect.events import filters
32
- from prefect.settings import (
33
- PREFECT_API_SERVICES_TRIGGERS_ENABLED,
34
- PREFECT_EXPERIMENTAL_EVENTS,
35
- )
36
-
37
- if HAS_PYDANTIC_V2:
38
- import pydantic.v1 as pydantic
39
- else:
40
- import pydantic
41
-
23
+ import pydantic
42
24
  from asgi_lifespan import LifespanManager
43
- from prefect._vendor.starlette import status
25
+ from starlette import status
26
+ from typing_extensions import ParamSpec
44
27
 
45
28
  import prefect
46
29
  import prefect.exceptions
47
30
  import prefect.settings
48
31
  import prefect.states
49
32
  from prefect.client.constants import SERVER_API_VERSION
50
- from prefect.client.schemas import FlowRun, OrchestrationResult, TaskRun
33
+ from prefect.client.schemas import FlowRun, OrchestrationResult, TaskRun, sorting
51
34
  from prefect.client.schemas.actions import (
52
35
  ArtifactCreate,
36
+ ArtifactUpdate,
53
37
  BlockDocumentCreate,
54
38
  BlockDocumentUpdate,
55
39
  BlockSchemaCreate,
@@ -133,7 +117,7 @@ from prefect.client.schemas.sorting import (
133
117
  LogSort,
134
118
  TaskRunSort,
135
119
  )
136
- from prefect.deprecated.data_documents import DataDocument
120
+ from prefect.events import filters
137
121
  from prefect.events.schemas.automations import Automation, AutomationCore
138
122
  from prefect.logging import get_logger
139
123
  from prefect.settings import (
@@ -171,12 +155,6 @@ class ServerType(AutoEnum):
171
155
  SERVER = AutoEnum.auto()
172
156
  CLOUD = AutoEnum.auto()
173
157
 
174
- def supports_automations(self) -> bool:
175
- if self == ServerType.CLOUD:
176
- return True
177
-
178
- return PREFECT_EXPERIMENTAL_EVENTS and PREFECT_API_SERVICES_TRIGGERS_ENABLED
179
-
180
158
 
181
159
  def get_client(
182
160
  httpx_settings: Optional[Dict[str, Any]] = None, sync_client: bool = False
@@ -198,14 +176,39 @@ def get_client(
198
176
  client.hello()
199
177
  ```
200
178
  """
201
- ctx = prefect.context.get_settings_context()
179
+ import prefect.context
180
+
181
+ settings_ctx = prefect.context.get_settings_context()
182
+
183
+ # try to load clients from a client context, if possible
184
+ # only load clients that match the provided config / loop
185
+ try:
186
+ loop = asyncio.get_running_loop()
187
+ except RuntimeError:
188
+ loop = None
189
+
190
+ if client_ctx := prefect.context.ClientContext.get():
191
+ if (
192
+ sync_client
193
+ and client_ctx.sync_client
194
+ and client_ctx._httpx_settings == httpx_settings
195
+ ):
196
+ return client_ctx.sync_client
197
+ elif (
198
+ not sync_client
199
+ and client_ctx.async_client
200
+ and client_ctx._httpx_settings == httpx_settings
201
+ and loop in (client_ctx.async_client._loop, None)
202
+ ):
203
+ return client_ctx.async_client
204
+
202
205
  api = PREFECT_API_URL.value()
203
206
 
204
207
  if not api:
205
208
  # create an ephemeral API if none was provided
206
209
  from prefect.server.api.server import create_app
207
210
 
208
- api = create_app(ctx.settings, ephemeral=True)
211
+ api = create_app(settings_ctx.settings, ephemeral=True)
209
212
 
210
213
  if sync_client:
211
214
  return SyncPrefectClient(
@@ -273,6 +276,7 @@ class PrefectClient:
273
276
  httpx_settings["headers"].setdefault("Authorization", f"Bearer {api_key}")
274
277
 
275
278
  # Context management
279
+ self._context_stack: int = 0
276
280
  self._exit_stack = AsyncExitStack()
277
281
  self._ephemeral_app: Optional[ASGIApp] = None
278
282
  self.manage_lifespan = True
@@ -453,7 +457,7 @@ class PrefectClient:
453
457
  """
454
458
  flow_data = FlowCreate(name=flow_name)
455
459
  response = await self._client.post(
456
- "/flows/", json=flow_data.dict(json_compatible=True)
460
+ "/flows/", json=flow_data.model_dump(mode="json")
457
461
  )
458
462
 
459
463
  flow_id = response.json().get("id")
@@ -474,7 +478,7 @@ class PrefectClient:
474
478
  a [Flow model][prefect.client.schemas.objects.Flow] representation of the flow
475
479
  """
476
480
  response = await self._client.get(f"/flows/{flow_id}")
477
- return Flow.parse_obj(response.json())
481
+ return Flow.model_validate(response.json())
478
482
 
479
483
  async def read_flows(
480
484
  self,
@@ -508,29 +512,23 @@ class PrefectClient:
508
512
  a list of Flow model representations of the flows
509
513
  """
510
514
  body = {
511
- "flows": flow_filter.dict(json_compatible=True) if flow_filter else None,
515
+ "flows": flow_filter.model_dump(mode="json") if flow_filter else None,
512
516
  "flow_runs": (
513
- flow_run_filter.dict(json_compatible=True, exclude_unset=True)
517
+ flow_run_filter.model_dump(mode="json", exclude_unset=True)
514
518
  if flow_run_filter
515
519
  else None
516
520
  ),
517
521
  "task_runs": (
518
- task_run_filter.dict(json_compatible=True) if task_run_filter else None
522
+ task_run_filter.model_dump(mode="json") if task_run_filter else None
519
523
  ),
520
524
  "deployments": (
521
- deployment_filter.dict(json_compatible=True)
522
- if deployment_filter
523
- else None
525
+ deployment_filter.model_dump(mode="json") if deployment_filter else None
524
526
  ),
525
527
  "work_pools": (
526
- work_pool_filter.dict(json_compatible=True)
527
- if work_pool_filter
528
- else None
528
+ work_pool_filter.model_dump(mode="json") if work_pool_filter else None
529
529
  ),
530
530
  "work_queues": (
531
- work_queue_filter.dict(json_compatible=True)
532
- if work_queue_filter
533
- else None
531
+ work_queue_filter.model_dump(mode="json") if work_queue_filter else None
534
532
  ),
535
533
  "sort": sort,
536
534
  "limit": limit,
@@ -538,7 +536,7 @@ class PrefectClient:
538
536
  }
539
537
 
540
538
  response = await self._client.post("/flows/filter", json=body)
541
- return pydantic.parse_obj_as(List[Flow], response.json())
539
+ return pydantic.TypeAdapter(List[Flow]).validate_python(response.json())
542
540
 
543
541
  async def read_flow_by_name(
544
542
  self,
@@ -554,7 +552,7 @@ class PrefectClient:
554
552
  a fully hydrated Flow model
555
553
  """
556
554
  response = await self._client.get(f"/flows/name/{flow_name}")
557
- return Flow.parse_obj(response.json())
555
+ return Flow.model_validate(response.json())
558
556
 
559
557
  async def create_flow_run_from_deployment(
560
558
  self,
@@ -622,9 +620,9 @@ class PrefectClient:
622
620
 
623
621
  response = await self._client.post(
624
622
  f"/deployments/{deployment_id}/create_flow_run",
625
- json=flow_run_create.dict(json_compatible=True, exclude_unset=True),
623
+ json=flow_run_create.model_dump(mode="json", exclude_unset=True),
626
624
  )
627
- return FlowRun.parse_obj(response.json())
625
+ return FlowRun.model_validate(response.json())
628
626
 
629
627
  async def create_flow_run(
630
628
  self,
@@ -680,9 +678,9 @@ class PrefectClient:
680
678
  ),
681
679
  )
682
680
 
683
- flow_run_create_json = flow_run_create.dict(json_compatible=True)
681
+ flow_run_create_json = flow_run_create.model_dump(mode="json")
684
682
  response = await self._client.post("/flow_runs/", json=flow_run_create_json)
685
- flow_run = FlowRun.parse_obj(response.json())
683
+ flow_run = FlowRun.model_validate(response.json())
686
684
 
687
685
  # Restore the parameters to the local objects to retain expectations about
688
686
  # Python objects
@@ -740,7 +738,7 @@ class PrefectClient:
740
738
 
741
739
  return await self._client.patch(
742
740
  f"/flow_runs/{flow_run_id}",
743
- json=flow_run_data.dict(json_compatible=True, exclude_unset=True),
741
+ json=flow_run_data.model_dump(mode="json", exclude_unset=True),
744
742
  )
745
743
 
746
744
  async def delete_flow_run(
@@ -790,7 +788,7 @@ class PrefectClient:
790
788
  )
791
789
  response = await self._client.post(
792
790
  "/concurrency_limits/",
793
- json=concurrency_limit_create.dict(json_compatible=True),
791
+ json=concurrency_limit_create.model_dump(mode="json"),
794
792
  )
795
793
 
796
794
  concurrency_limit_id = response.json().get("id")
@@ -832,7 +830,7 @@ class PrefectClient:
832
830
  if not concurrency_limit_id:
833
831
  raise httpx.RequestError(f"Malformed response: {response}")
834
832
 
835
- concurrency_limit = ConcurrencyLimit.parse_obj(response.json())
833
+ concurrency_limit = ConcurrencyLimit.model_validate(response.json())
836
834
  return concurrency_limit
837
835
 
838
836
  async def read_concurrency_limits(
@@ -857,7 +855,9 @@ class PrefectClient:
857
855
  }
858
856
 
859
857
  response = await self._client.post("/concurrency_limits/filter", json=body)
860
- return pydantic.parse_obj_as(List[ConcurrencyLimit], response.json())
858
+ return pydantic.TypeAdapter(List[ConcurrencyLimit]).validate_python(
859
+ response.json()
860
+ )
861
861
 
862
862
  async def reset_concurrency_limit_by_tag(
863
863
  self,
@@ -969,7 +969,7 @@ class PrefectClient:
969
969
  if priority is not None:
970
970
  create_model.priority = priority
971
971
 
972
- data = create_model.dict(json_compatible=True)
972
+ data = create_model.model_dump(mode="json")
973
973
  try:
974
974
  if work_pool_name is not None:
975
975
  response = await self._client.post(
@@ -984,7 +984,7 @@ class PrefectClient:
984
984
  raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
985
985
  else:
986
986
  raise
987
- return WorkQueue.parse_obj(response.json())
987
+ return WorkQueue.model_validate(response.json())
988
988
 
989
989
  async def read_work_queue_by_name(
990
990
  self,
@@ -1019,7 +1019,7 @@ class PrefectClient:
1019
1019
  else:
1020
1020
  raise
1021
1021
 
1022
- return WorkQueue.parse_obj(response.json())
1022
+ return WorkQueue.model_validate(response.json())
1023
1023
 
1024
1024
  async def update_work_queue(self, id: UUID, **kwargs):
1025
1025
  """
@@ -1038,7 +1038,7 @@ class PrefectClient:
1038
1038
  if not kwargs:
1039
1039
  raise ValueError("No fields provided to update.")
1040
1040
 
1041
- data = WorkQueueUpdate(**kwargs).dict(json_compatible=True, exclude_unset=True)
1041
+ data = WorkQueueUpdate(**kwargs).model_dump(mode="json", exclude_unset=True)
1042
1042
  try:
1043
1043
  await self._client.patch(f"/work_queues/{id}", json=data)
1044
1044
  except httpx.HTTPStatusError as e:
@@ -1085,7 +1085,7 @@ class PrefectClient:
1085
1085
  raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
1086
1086
  else:
1087
1087
  raise
1088
- return pydantic.parse_obj_as(List[FlowRun], response.json())
1088
+ return pydantic.TypeAdapter(List[FlowRun]).validate_python(response.json())
1089
1089
 
1090
1090
  async def read_work_queue(
1091
1091
  self,
@@ -1111,7 +1111,7 @@ class PrefectClient:
1111
1111
  raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
1112
1112
  else:
1113
1113
  raise
1114
- return WorkQueue.parse_obj(response.json())
1114
+ return WorkQueue.model_validate(response.json())
1115
1115
 
1116
1116
  async def read_work_queue_status(
1117
1117
  self,
@@ -1137,7 +1137,7 @@ class PrefectClient:
1137
1137
  raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
1138
1138
  else:
1139
1139
  raise
1140
- return WorkQueueStatusDetail.parse_obj(response.json())
1140
+ return WorkQueueStatusDetail.model_validate(response.json())
1141
1141
 
1142
1142
  async def match_work_queues(
1143
1143
  self,
@@ -1206,8 +1206,8 @@ class PrefectClient:
1206
1206
  try:
1207
1207
  response = await self._client.post(
1208
1208
  "/block_types/",
1209
- json=block_type.dict(
1210
- json_compatible=True, exclude_unset=True, exclude={"id"}
1209
+ json=block_type.model_dump(
1210
+ mode="json", exclude_unset=True, exclude={"id"}
1211
1211
  ),
1212
1212
  )
1213
1213
  except httpx.HTTPStatusError as e:
@@ -1215,7 +1215,7 @@ class PrefectClient:
1215
1215
  raise prefect.exceptions.ObjectAlreadyExists(http_exc=e) from e
1216
1216
  else:
1217
1217
  raise
1218
- return BlockType.parse_obj(response.json())
1218
+ return BlockType.model_validate(response.json())
1219
1219
 
1220
1220
  async def create_block_schema(self, block_schema: BlockSchemaCreate) -> BlockSchema:
1221
1221
  """
@@ -1224,8 +1224,8 @@ class PrefectClient:
1224
1224
  try:
1225
1225
  response = await self._client.post(
1226
1226
  "/block_schemas/",
1227
- json=block_schema.dict(
1228
- json_compatible=True,
1227
+ json=block_schema.model_dump(
1228
+ mode="json",
1229
1229
  exclude_unset=True,
1230
1230
  exclude={"id", "block_type", "checksum"},
1231
1231
  ),
@@ -1235,7 +1235,7 @@ class PrefectClient:
1235
1235
  raise prefect.exceptions.ObjectAlreadyExists(http_exc=e) from e
1236
1236
  else:
1237
1237
  raise
1238
- return BlockSchema.parse_obj(response.json())
1238
+ return BlockSchema.model_validate(response.json())
1239
1239
 
1240
1240
  async def create_block_document(
1241
1241
  self,
@@ -1252,24 +1252,14 @@ class PrefectClient:
1252
1252
  `SecretBytes` fields. Note Blocks may not work as expected if
1253
1253
  this is set to `False`.
1254
1254
  """
1255
- if isinstance(block_document, BlockDocument):
1256
- block_document = BlockDocumentCreate.parse_obj(
1257
- block_document.dict(
1258
- json_compatible=True,
1259
- include_secrets=include_secrets,
1260
- exclude_unset=True,
1261
- exclude={"id", "block_schema", "block_type"},
1262
- ),
1263
- )
1264
-
1265
1255
  try:
1266
1256
  response = await self._client.post(
1267
1257
  "/block_documents/",
1268
- json=block_document.dict(
1269
- json_compatible=True,
1270
- include_secrets=include_secrets,
1258
+ json=block_document.model_dump(
1259
+ mode="json",
1271
1260
  exclude_unset=True,
1272
1261
  exclude={"id", "block_schema", "block_type"},
1262
+ context={"include_secrets": include_secrets},
1273
1263
  ),
1274
1264
  )
1275
1265
  except httpx.HTTPStatusError as e:
@@ -1277,7 +1267,7 @@ class PrefectClient:
1277
1267
  raise prefect.exceptions.ObjectAlreadyExists(http_exc=e) from e
1278
1268
  else:
1279
1269
  raise
1280
- return BlockDocument.parse_obj(response.json())
1270
+ return BlockDocument.model_validate(response.json())
1281
1271
 
1282
1272
  async def update_block_document(
1283
1273
  self,
@@ -1290,11 +1280,10 @@ class PrefectClient:
1290
1280
  try:
1291
1281
  await self._client.patch(
1292
1282
  f"/block_documents/{block_document_id}",
1293
- json=block_document.dict(
1294
- json_compatible=True,
1283
+ json=block_document.model_dump(
1284
+ mode="json",
1295
1285
  exclude_unset=True,
1296
1286
  include={"data", "merge_existing_data", "block_schema_id"},
1297
- include_secrets=True,
1298
1287
  ),
1299
1288
  )
1300
1289
  except httpx.HTTPStatusError as e:
@@ -1326,7 +1315,7 @@ class PrefectClient:
1326
1315
  raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
1327
1316
  else:
1328
1317
  raise
1329
- return BlockType.parse_obj(response.json())
1318
+ return BlockType.model_validate(response.json())
1330
1319
 
1331
1320
  async def read_block_schema_by_checksum(
1332
1321
  self, checksum: str, version: Optional[str] = None
@@ -1344,7 +1333,7 @@ class PrefectClient:
1344
1333
  raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
1345
1334
  else:
1346
1335
  raise
1347
- return BlockSchema.parse_obj(response.json())
1336
+ return BlockSchema.model_validate(response.json())
1348
1337
 
1349
1338
  async def update_block_type(self, block_type_id: UUID, block_type: BlockTypeUpdate):
1350
1339
  """
@@ -1353,11 +1342,10 @@ class PrefectClient:
1353
1342
  try:
1354
1343
  await self._client.patch(
1355
1344
  f"/block_types/{block_type_id}",
1356
- json=block_type.dict(
1357
- json_compatible=True,
1345
+ json=block_type.model_dump(
1346
+ mode="json",
1358
1347
  exclude_unset=True,
1359
1348
  include=BlockTypeUpdate.updatable_fields(),
1360
- include_secrets=True,
1361
1349
  ),
1362
1350
  )
1363
1351
  except httpx.HTTPStatusError as e:
@@ -1396,7 +1384,7 @@ class PrefectClient:
1396
1384
  List of BlockTypes.
1397
1385
  """
1398
1386
  response = await self._client.post("/block_types/filter", json={})
1399
- return pydantic.parse_obj_as(List[BlockType], response.json())
1387
+ return pydantic.TypeAdapter(List[BlockType]).validate_python(response.json())
1400
1388
 
1401
1389
  async def read_block_schemas(self) -> List[BlockSchema]:
1402
1390
  """
@@ -1408,7 +1396,7 @@ class PrefectClient:
1408
1396
  A BlockSchema.
1409
1397
  """
1410
1398
  response = await self._client.post("/block_schemas/filter", json={})
1411
- return pydantic.parse_obj_as(List[BlockSchema], response.json())
1399
+ return pydantic.TypeAdapter(List[BlockSchema]).validate_python(response.json())
1412
1400
 
1413
1401
  async def get_most_recent_block_schema_for_block_type(
1414
1402
  self,
@@ -1436,7 +1424,9 @@ class PrefectClient:
1436
1424
  )
1437
1425
  except httpx.HTTPStatusError:
1438
1426
  raise
1439
- return BlockSchema.parse_obj(response.json()[0]) if response.json() else None
1427
+ return (
1428
+ BlockSchema.model_validate(response.json()[0]) if response.json() else None
1429
+ )
1440
1430
 
1441
1431
  async def read_block_document(
1442
1432
  self,
@@ -1474,7 +1464,7 @@ class PrefectClient:
1474
1464
  raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
1475
1465
  else:
1476
1466
  raise
1477
- return BlockDocument.parse_obj(response.json())
1467
+ return BlockDocument.model_validate(response.json())
1478
1468
 
1479
1469
  async def read_block_document_by_name(
1480
1470
  self,
@@ -1512,7 +1502,7 @@ class PrefectClient:
1512
1502
  raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
1513
1503
  else:
1514
1504
  raise
1515
- return BlockDocument.parse_obj(response.json())
1505
+ return BlockDocument.model_validate(response.json())
1516
1506
 
1517
1507
  async def read_block_documents(
1518
1508
  self,
@@ -1547,7 +1537,9 @@ class PrefectClient:
1547
1537
  include_secrets=include_secrets,
1548
1538
  ),
1549
1539
  )
1550
- return pydantic.parse_obj_as(List[BlockDocument], response.json())
1540
+ return pydantic.TypeAdapter(List[BlockDocument]).validate_python(
1541
+ response.json()
1542
+ )
1551
1543
 
1552
1544
  async def read_block_documents_by_type(
1553
1545
  self,
@@ -1576,26 +1568,27 @@ class PrefectClient:
1576
1568
  ),
1577
1569
  )
1578
1570
 
1579
- return pydantic.parse_obj_as(List[BlockDocument], response.json())
1571
+ return pydantic.TypeAdapter(List[BlockDocument]).validate_python(
1572
+ response.json()
1573
+ )
1580
1574
 
1581
1575
  async def create_deployment(
1582
1576
  self,
1583
1577
  flow_id: UUID,
1584
1578
  name: str,
1585
- version: str = None,
1586
- schedule: SCHEDULE_TYPES = None,
1587
- schedules: List[DeploymentScheduleCreate] = None,
1579
+ version: Optional[str] = None,
1580
+ schedule: Optional[SCHEDULE_TYPES] = None,
1581
+ schedules: Optional[List[DeploymentScheduleCreate]] = None,
1588
1582
  parameters: Optional[Dict[str, Any]] = None,
1589
- description: str = None,
1590
- work_queue_name: str = None,
1591
- work_pool_name: str = None,
1592
- tags: List[str] = None,
1593
- storage_document_id: UUID = None,
1594
- manifest_path: str = None,
1595
- path: str = None,
1596
- entrypoint: str = None,
1597
- infrastructure_document_id: UUID = None,
1598
- infra_overrides: Optional[Dict[str, Any]] = None, # for backwards compat
1583
+ description: Optional[str] = None,
1584
+ work_queue_name: Optional[str] = None,
1585
+ work_pool_name: Optional[str] = None,
1586
+ tags: Optional[List[str]] = None,
1587
+ storage_document_id: Optional[UUID] = None,
1588
+ manifest_path: Optional[str] = None,
1589
+ path: Optional[str] = None,
1590
+ entrypoint: Optional[str] = None,
1591
+ infrastructure_document_id: Optional[UUID] = None,
1599
1592
  parameter_openapi_schema: Optional[Dict[str, Any]] = None,
1600
1593
  is_schedule_active: Optional[bool] = None,
1601
1594
  paused: Optional[bool] = None,
@@ -1627,8 +1620,9 @@ class PrefectClient:
1627
1620
  Returns:
1628
1621
  the ID of the deployment in the backend
1629
1622
  """
1630
- jv = handle_deprecated_infra_overrides_parameter(job_variables, infra_overrides)
1631
1623
 
1624
+ if parameter_openapi_schema is None:
1625
+ parameter_openapi_schema = {}
1632
1626
  deployment_create = DeploymentCreate(
1633
1627
  flow_id=flow_id,
1634
1628
  name=name,
@@ -1642,7 +1636,7 @@ class PrefectClient:
1642
1636
  entrypoint=entrypoint,
1643
1637
  manifest_path=manifest_path, # for backwards compat
1644
1638
  infrastructure_document_id=infrastructure_document_id,
1645
- job_variables=jv,
1639
+ job_variables=dict(job_variables or {}),
1646
1640
  parameter_openapi_schema=parameter_openapi_schema,
1647
1641
  is_schedule_active=is_schedule_active,
1648
1642
  paused=paused,
@@ -1659,7 +1653,7 @@ class PrefectClient:
1659
1653
  exclude = {
1660
1654
  field
1661
1655
  for field in ["work_pool_name", "work_queue_name"]
1662
- if field not in deployment_create.__fields_set__
1656
+ if field not in deployment_create.model_fields_set
1663
1657
  }
1664
1658
 
1665
1659
  if deployment_create.is_schedule_active is None:
@@ -1674,7 +1668,7 @@ class PrefectClient:
1674
1668
  if deployment_create.enforce_parameter_schema is None:
1675
1669
  exclude.add("enforce_parameter_schema")
1676
1670
 
1677
- json = deployment_create.dict(json_compatible=True, exclude=exclude)
1671
+ json = deployment_create.model_dump(mode="json", exclude=exclude)
1678
1672
  response = await self._client.post(
1679
1673
  "/deployments/",
1680
1674
  json=json,
@@ -1732,7 +1726,7 @@ class PrefectClient:
1732
1726
 
1733
1727
  await self._client.patch(
1734
1728
  f"/deployments/{deployment.id}",
1735
- json=deployment_update.dict(json_compatible=True, exclude=exclude),
1729
+ json=deployment_update.model_dump(mode="json", exclude=exclude),
1736
1730
  )
1737
1731
 
1738
1732
  async def _create_deployment_from_schema(self, schema: DeploymentCreate) -> UUID:
@@ -1742,7 +1736,7 @@ class PrefectClient:
1742
1736
  # TODO: We are likely to remove this method once we have considered the
1743
1737
  # packaging interface for deployments further.
1744
1738
  response = await self._client.post(
1745
- "/deployments/", json=schema.dict(json_compatible=True)
1739
+ "/deployments/", json=schema.model_dump(mode="json")
1746
1740
  )
1747
1741
  deployment_id = response.json().get("id")
1748
1742
  if not deployment_id:
@@ -1770,7 +1764,7 @@ class PrefectClient:
1770
1764
  raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
1771
1765
  else:
1772
1766
  raise
1773
- return DeploymentResponse.parse_obj(response.json())
1767
+ return DeploymentResponse.model_validate(response.json())
1774
1768
 
1775
1769
  async def read_deployment_by_name(
1776
1770
  self,
@@ -1797,19 +1791,19 @@ class PrefectClient:
1797
1791
  else:
1798
1792
  raise
1799
1793
 
1800
- return DeploymentResponse.parse_obj(response.json())
1794
+ return DeploymentResponse.model_validate(response.json())
1801
1795
 
1802
1796
  async def read_deployments(
1803
1797
  self,
1804
1798
  *,
1805
- flow_filter: FlowFilter = None,
1806
- flow_run_filter: FlowRunFilter = None,
1807
- task_run_filter: TaskRunFilter = None,
1808
- deployment_filter: DeploymentFilter = None,
1809
- work_pool_filter: WorkPoolFilter = None,
1810
- work_queue_filter: WorkQueueFilter = None,
1811
- limit: int = None,
1812
- sort: DeploymentSort = None,
1799
+ flow_filter: Optional[FlowFilter] = None,
1800
+ flow_run_filter: Optional[FlowRunFilter] = None,
1801
+ task_run_filter: Optional[TaskRunFilter] = None,
1802
+ deployment_filter: Optional[DeploymentFilter] = None,
1803
+ work_pool_filter: Optional[WorkPoolFilter] = None,
1804
+ work_queue_filter: Optional[WorkQueueFilter] = None,
1805
+ limit: Optional[int] = None,
1806
+ sort: Optional[DeploymentSort] = None,
1813
1807
  offset: int = 0,
1814
1808
  ) -> List[DeploymentResponse]:
1815
1809
  """
@@ -1831,29 +1825,23 @@ class PrefectClient:
1831
1825
  of the deployments
1832
1826
  """
1833
1827
  body = {
1834
- "flows": flow_filter.dict(json_compatible=True) if flow_filter else None,
1828
+ "flows": flow_filter.model_dump(mode="json") if flow_filter else None,
1835
1829
  "flow_runs": (
1836
- flow_run_filter.dict(json_compatible=True, exclude_unset=True)
1830
+ flow_run_filter.model_dump(mode="json", exclude_unset=True)
1837
1831
  if flow_run_filter
1838
1832
  else None
1839
1833
  ),
1840
1834
  "task_runs": (
1841
- task_run_filter.dict(json_compatible=True) if task_run_filter else None
1835
+ task_run_filter.model_dump(mode="json") if task_run_filter else None
1842
1836
  ),
1843
1837
  "deployments": (
1844
- deployment_filter.dict(json_compatible=True)
1845
- if deployment_filter
1846
- else None
1838
+ deployment_filter.model_dump(mode="json") if deployment_filter else None
1847
1839
  ),
1848
1840
  "work_pools": (
1849
- work_pool_filter.dict(json_compatible=True)
1850
- if work_pool_filter
1851
- else None
1841
+ work_pool_filter.model_dump(mode="json") if work_pool_filter else None
1852
1842
  ),
1853
1843
  "work_pool_queues": (
1854
- work_queue_filter.dict(json_compatible=True)
1855
- if work_queue_filter
1856
- else None
1844
+ work_queue_filter.model_dump(mode="json") if work_queue_filter else None
1857
1845
  ),
1858
1846
  "limit": limit,
1859
1847
  "offset": offset,
@@ -1861,7 +1849,9 @@ class PrefectClient:
1861
1849
  }
1862
1850
 
1863
1851
  response = await self._client.post("/deployments/filter", json=body)
1864
- return pydantic.parse_obj_as(List[DeploymentResponse], response.json())
1852
+ return pydantic.TypeAdapter(List[DeploymentResponse]).validate_python(
1853
+ response.json()
1854
+ )
1865
1855
 
1866
1856
  async def delete_deployment(
1867
1857
  self,
@@ -1909,13 +1899,15 @@ class PrefectClient:
1909
1899
  ]
1910
1900
 
1911
1901
  json = [
1912
- deployment_schedule_create.dict(json_compatible=True)
1902
+ deployment_schedule_create.model_dump(mode="json")
1913
1903
  for deployment_schedule_create in deployment_schedule_create
1914
1904
  ]
1915
1905
  response = await self._client.post(
1916
1906
  f"/deployments/{deployment_id}/schedules", json=json
1917
1907
  )
1918
- return pydantic.parse_obj_as(List[DeploymentSchedule], response.json())
1908
+ return pydantic.TypeAdapter(List[DeploymentSchedule]).validate_python(
1909
+ response.json()
1910
+ )
1919
1911
 
1920
1912
  async def read_deployment_schedules(
1921
1913
  self,
@@ -1937,7 +1929,9 @@ class PrefectClient:
1937
1929
  raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
1938
1930
  else:
1939
1931
  raise
1940
- return pydantic.parse_obj_as(List[DeploymentSchedule], response.json())
1932
+ return pydantic.TypeAdapter(List[DeploymentSchedule]).validate_python(
1933
+ response.json()
1934
+ )
1941
1935
 
1942
1936
  async def update_deployment_schedule(
1943
1937
  self,
@@ -1962,7 +1956,7 @@ class PrefectClient:
1962
1956
  kwargs["schedule"] = schedule
1963
1957
 
1964
1958
  deployment_schedule_update = DeploymentScheduleUpdate(**kwargs)
1965
- json = deployment_schedule_update.dict(json_compatible=True, exclude_unset=True)
1959
+ json = deployment_schedule_update.model_dump(mode="json", exclude_unset=True)
1966
1960
 
1967
1961
  try:
1968
1962
  await self._client.patch(
@@ -2016,7 +2010,7 @@ class PrefectClient:
2016
2010
  raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
2017
2011
  else:
2018
2012
  raise
2019
- return FlowRun.parse_obj(response.json())
2013
+ return FlowRun.model_validate(response.json())
2020
2014
 
2021
2015
  async def resume_flow_run(
2022
2016
  self, flow_run_id: UUID, run_input: Optional[Dict] = None
@@ -2038,7 +2032,7 @@ class PrefectClient:
2038
2032
  except httpx.HTTPStatusError:
2039
2033
  raise
2040
2034
 
2041
- return OrchestrationResult.parse_obj(response.json())
2035
+ return OrchestrationResult.model_validate(response.json())
2042
2036
 
2043
2037
  async def read_flow_runs(
2044
2038
  self,
@@ -2073,29 +2067,23 @@ class PrefectClient:
2073
2067
  of the flow runs
2074
2068
  """
2075
2069
  body = {
2076
- "flows": flow_filter.dict(json_compatible=True) if flow_filter else None,
2070
+ "flows": flow_filter.model_dump(mode="json") if flow_filter else None,
2077
2071
  "flow_runs": (
2078
- flow_run_filter.dict(json_compatible=True, exclude_unset=True)
2072
+ flow_run_filter.model_dump(mode="json", exclude_unset=True)
2079
2073
  if flow_run_filter
2080
2074
  else None
2081
2075
  ),
2082
2076
  "task_runs": (
2083
- task_run_filter.dict(json_compatible=True) if task_run_filter else None
2077
+ task_run_filter.model_dump(mode="json") if task_run_filter else None
2084
2078
  ),
2085
2079
  "deployments": (
2086
- deployment_filter.dict(json_compatible=True)
2087
- if deployment_filter
2088
- else None
2080
+ deployment_filter.model_dump(mode="json") if deployment_filter else None
2089
2081
  ),
2090
2082
  "work_pools": (
2091
- work_pool_filter.dict(json_compatible=True)
2092
- if work_pool_filter
2093
- else None
2083
+ work_pool_filter.model_dump(mode="json") if work_pool_filter else None
2094
2084
  ),
2095
2085
  "work_pool_queues": (
2096
- work_queue_filter.dict(json_compatible=True)
2097
- if work_queue_filter
2098
- else None
2086
+ work_queue_filter.model_dump(mode="json") if work_queue_filter else None
2099
2087
  ),
2100
2088
  "sort": sort,
2101
2089
  "limit": limit,
@@ -2103,7 +2091,7 @@ class PrefectClient:
2103
2091
  }
2104
2092
 
2105
2093
  response = await self._client.post("/flow_runs/filter", json=body)
2106
- return pydantic.parse_obj_as(List[FlowRun], response.json())
2094
+ return pydantic.TypeAdapter(List[FlowRun]).validate_python(response.json())
2107
2095
 
2108
2096
  async def set_flow_run_state(
2109
2097
  self,
@@ -2123,13 +2111,17 @@ class PrefectClient:
2123
2111
  Returns:
2124
2112
  an OrchestrationResult model representation of state orchestration output
2125
2113
  """
2114
+ flow_run_id = (
2115
+ flow_run_id if isinstance(flow_run_id, UUID) else UUID(flow_run_id)
2116
+ )
2126
2117
  state_create = state.to_state_create()
2127
2118
  state_create.state_details.flow_run_id = flow_run_id
2128
2119
  state_create.state_details.transition_id = uuid4()
2120
+ print(repr(state_create))
2129
2121
  try:
2130
2122
  response = await self._client.post(
2131
2123
  f"/flow_runs/{flow_run_id}/set_state",
2132
- json=dict(state=state_create.dict(json_compatible=True), force=force),
2124
+ json=dict(state=state_create.model_dump(mode="json"), force=force),
2133
2125
  )
2134
2126
  except httpx.HTTPStatusError as e:
2135
2127
  if e.response.status_code == status.HTTP_404_NOT_FOUND:
@@ -2137,7 +2129,7 @@ class PrefectClient:
2137
2129
  else:
2138
2130
  raise
2139
2131
 
2140
- return OrchestrationResult.parse_obj(response.json())
2132
+ return OrchestrationResult.model_validate(response.json())
2141
2133
 
2142
2134
  async def read_flow_run_states(
2143
2135
  self, flow_run_id: UUID
@@ -2155,13 +2147,15 @@ class PrefectClient:
2155
2147
  response = await self._client.get(
2156
2148
  "/flow_run_states/", params=dict(flow_run_id=str(flow_run_id))
2157
2149
  )
2158
- return pydantic.parse_obj_as(List[prefect.states.State], response.json())
2150
+ return pydantic.TypeAdapter(List[prefect.states.State]).validate_python(
2151
+ response.json()
2152
+ )
2159
2153
 
2160
2154
  async def set_task_run_name(self, task_run_id: UUID, name: str):
2161
2155
  task_run_data = TaskRunUpdate(name=name)
2162
2156
  return await self._client.patch(
2163
2157
  f"/task_runs/{task_run_id}",
2164
- json=task_run_data.dict(json_compatible=True, exclude_unset=True),
2158
+ json=task_run_data.model_dump(mode="json", exclude_unset=True),
2165
2159
  )
2166
2160
 
2167
2161
  async def create_task_run(
@@ -2169,6 +2163,7 @@ class PrefectClient:
2169
2163
  task: "TaskObject[P, R]",
2170
2164
  flow_run_id: Optional[UUID],
2171
2165
  dynamic_key: str,
2166
+ id: Optional[UUID] = None,
2172
2167
  name: Optional[str] = None,
2173
2168
  extra_tags: Optional[Iterable[str]] = None,
2174
2169
  state: Optional[prefect.states.State[R]] = None,
@@ -2192,6 +2187,8 @@ class PrefectClient:
2192
2187
  task: The Task to run
2193
2188
  flow_run_id: The flow run id with which to associate the task run
2194
2189
  dynamic_key: A key unique to this particular run of a Task within the flow
2190
+ id: An optional ID for the task run. If not provided, one will be generated
2191
+ server-side.
2195
2192
  name: An optional name for the task run
2196
2193
  extra_tags: an optional list of extra tags to apply to the task run in
2197
2194
  addition to `task.tags`
@@ -2208,10 +2205,11 @@ class PrefectClient:
2208
2205
  state = prefect.states.Pending()
2209
2206
 
2210
2207
  task_run_data = TaskRunCreate(
2208
+ id=id,
2211
2209
  name=name,
2212
2210
  flow_run_id=flow_run_id,
2213
2211
  task_key=task.task_key,
2214
- dynamic_key=dynamic_key,
2212
+ dynamic_key=str(dynamic_key),
2215
2213
  tags=list(tags),
2216
2214
  task_version=task.version,
2217
2215
  empirical_policy=TaskRunPolicy(
@@ -2222,11 +2220,10 @@ class PrefectClient:
2222
2220
  state=state.to_state_create(),
2223
2221
  task_inputs=task_inputs or {},
2224
2222
  )
2223
+ content = task_run_data.model_dump_json(exclude={"id"} if id is None else None)
2225
2224
 
2226
- response = await self._client.post(
2227
- "/task_runs/", json=task_run_data.dict(json_compatible=True)
2228
- )
2229
- return TaskRun.parse_obj(response.json())
2225
+ response = await self._client.post("/task_runs/", content=content)
2226
+ return TaskRun.model_validate(response.json())
2230
2227
 
2231
2228
  async def read_task_run(self, task_run_id: UUID) -> TaskRun:
2232
2229
  """
@@ -2238,8 +2235,14 @@ class PrefectClient:
2238
2235
  Returns:
2239
2236
  a Task Run model representation of the task run
2240
2237
  """
2241
- response = await self._client.get(f"/task_runs/{task_run_id}")
2242
- return TaskRun.parse_obj(response.json())
2238
+ try:
2239
+ response = await self._client.get(f"/task_runs/{task_run_id}")
2240
+ return TaskRun.model_validate(response.json())
2241
+ except httpx.HTTPStatusError as e:
2242
+ if e.response.status_code == status.HTTP_404_NOT_FOUND:
2243
+ raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
2244
+ else:
2245
+ raise
2243
2246
 
2244
2247
  async def read_task_runs(
2245
2248
  self,
@@ -2270,26 +2273,24 @@ class PrefectClient:
2270
2273
  of the task runs
2271
2274
  """
2272
2275
  body = {
2273
- "flows": flow_filter.dict(json_compatible=True) if flow_filter else None,
2276
+ "flows": flow_filter.model_dump(mode="json") if flow_filter else None,
2274
2277
  "flow_runs": (
2275
- flow_run_filter.dict(json_compatible=True, exclude_unset=True)
2278
+ flow_run_filter.model_dump(mode="json", exclude_unset=True)
2276
2279
  if flow_run_filter
2277
2280
  else None
2278
2281
  ),
2279
2282
  "task_runs": (
2280
- task_run_filter.dict(json_compatible=True) if task_run_filter else None
2283
+ task_run_filter.model_dump(mode="json") if task_run_filter else None
2281
2284
  ),
2282
2285
  "deployments": (
2283
- deployment_filter.dict(json_compatible=True)
2284
- if deployment_filter
2285
- else None
2286
+ deployment_filter.model_dump(mode="json") if deployment_filter else None
2286
2287
  ),
2287
2288
  "sort": sort,
2288
2289
  "limit": limit,
2289
2290
  "offset": offset,
2290
2291
  }
2291
2292
  response = await self._client.post("/task_runs/filter", json=body)
2292
- return pydantic.parse_obj_as(List[TaskRun], response.json())
2293
+ return pydantic.TypeAdapter(List[TaskRun]).validate_python(response.json())
2293
2294
 
2294
2295
  async def delete_task_run(self, task_run_id: UUID) -> None:
2295
2296
  """
@@ -2331,9 +2332,9 @@ class PrefectClient:
2331
2332
  state_create.state_details.task_run_id = task_run_id
2332
2333
  response = await self._client.post(
2333
2334
  f"/task_runs/{task_run_id}/set_state",
2334
- json=dict(state=state_create.dict(json_compatible=True), force=force),
2335
+ json=dict(state=state_create.model_dump(mode="json"), force=force),
2335
2336
  )
2336
- return OrchestrationResult.parse_obj(response.json())
2337
+ return OrchestrationResult.model_validate(response.json())
2337
2338
 
2338
2339
  async def read_task_run_states(
2339
2340
  self, task_run_id: UUID
@@ -2350,7 +2351,9 @@ class PrefectClient:
2350
2351
  response = await self._client.get(
2351
2352
  "/task_run_states/", params=dict(task_run_id=str(task_run_id))
2352
2353
  )
2353
- return pydantic.parse_obj_as(List[prefect.states.State], response.json())
2354
+ return pydantic.TypeAdapter(List[prefect.states.State]).validate_python(
2355
+ response.json()
2356
+ )
2354
2357
 
2355
2358
  async def create_logs(self, logs: Iterable[Union[LogCreate, dict]]) -> None:
2356
2359
  """
@@ -2360,7 +2363,7 @@ class PrefectClient:
2360
2363
  logs: An iterable of `LogCreate` objects or already json-compatible dicts
2361
2364
  """
2362
2365
  serialized_logs = [
2363
- log.dict(json_compatible=True) if isinstance(log, LogCreate) else log
2366
+ log.model_dump(mode="json") if isinstance(log, LogCreate) else log
2364
2367
  for log in logs
2365
2368
  ]
2366
2369
  await self._client.post("/logs/", json=serialized_logs)
@@ -2397,7 +2400,7 @@ class PrefectClient:
2397
2400
  )
2398
2401
  response = await self._client.post(
2399
2402
  "/flow_run_notification_policies/",
2400
- json=policy.dict(json_compatible=True),
2403
+ json=policy.model_dump(mode="json"),
2401
2404
  )
2402
2405
 
2403
2406
  policy_id = response.json().get("id")
@@ -2467,7 +2470,7 @@ class PrefectClient:
2467
2470
  try:
2468
2471
  await self._client.patch(
2469
2472
  f"/flow_run_notification_policies/{id}",
2470
- json=policy.dict(json_compatible=True, exclude_unset=True),
2473
+ json=policy.model_dump(mode="json", exclude_unset=True),
2471
2474
  )
2472
2475
  except httpx.HTTPStatusError as e:
2473
2476
  if e.response.status_code == status.HTTP_404_NOT_FOUND:
@@ -2496,7 +2499,7 @@ class PrefectClient:
2496
2499
  """
2497
2500
  body = {
2498
2501
  "flow_run_notification_policy_filter": (
2499
- flow_run_notification_policy_filter.dict(json_compatible=True)
2502
+ flow_run_notification_policy_filter.model_dump(mode="json")
2500
2503
  if flow_run_notification_policy_filter
2501
2504
  else None
2502
2505
  ),
@@ -2506,7 +2509,9 @@ class PrefectClient:
2506
2509
  response = await self._client.post(
2507
2510
  "/flow_run_notification_policies/filter", json=body
2508
2511
  )
2509
- return pydantic.parse_obj_as(List[FlowRunNotificationPolicy], response.json())
2512
+ return pydantic.TypeAdapter(List[FlowRunNotificationPolicy]).validate_python(
2513
+ response.json()
2514
+ )
2510
2515
 
2511
2516
  async def read_logs(
2512
2517
  self,
@@ -2519,45 +2524,14 @@ class PrefectClient:
2519
2524
  Read flow and task run logs.
2520
2525
  """
2521
2526
  body = {
2522
- "logs": log_filter.dict(json_compatible=True) if log_filter else None,
2527
+ "logs": log_filter.model_dump(mode="json") if log_filter else None,
2523
2528
  "limit": limit,
2524
2529
  "offset": offset,
2525
2530
  "sort": sort,
2526
2531
  }
2527
2532
 
2528
2533
  response = await self._client.post("/logs/filter", json=body)
2529
- return pydantic.parse_obj_as(List[Log], response.json())
2530
-
2531
- async def resolve_datadoc(self, datadoc: DataDocument) -> Any:
2532
- """
2533
- Recursively decode possibly nested data documents.
2534
-
2535
- "server" encoded documents will be retrieved from the server.
2536
-
2537
- Args:
2538
- datadoc: The data document to resolve
2539
-
2540
- Returns:
2541
- a decoded object, the innermost data
2542
- """
2543
- if not isinstance(datadoc, DataDocument):
2544
- raise TypeError(
2545
- f"`resolve_datadoc` received invalid type {type(datadoc).__name__}"
2546
- )
2547
-
2548
- async def resolve_inner(data):
2549
- if isinstance(data, bytes):
2550
- try:
2551
- data = DataDocument.parse_raw(data)
2552
- except pydantic.ValidationError:
2553
- return data
2554
-
2555
- if isinstance(data, DataDocument):
2556
- return await resolve_inner(data.decode())
2557
-
2558
- return data
2559
-
2560
- return await resolve_inner(datadoc)
2534
+ return pydantic.TypeAdapter(List[Log]).validate_python(response.json())
2561
2535
 
2562
2536
  async def send_worker_heartbeat(
2563
2537
  self,
@@ -2601,7 +2575,7 @@ class PrefectClient:
2601
2575
  f"/work_pools/{work_pool_name}/workers/filter",
2602
2576
  json={
2603
2577
  "worker_filter": (
2604
- worker_filter.dict(json_compatible=True, exclude_unset=True)
2578
+ worker_filter.model_dump(mode="json", exclude_unset=True)
2605
2579
  if worker_filter
2606
2580
  else None
2607
2581
  ),
@@ -2610,7 +2584,7 @@ class PrefectClient:
2610
2584
  },
2611
2585
  )
2612
2586
 
2613
- return pydantic.parse_obj_as(List[Worker], response.json())
2587
+ return pydantic.TypeAdapter(List[Worker]).validate_python(response.json())
2614
2588
 
2615
2589
  async def read_work_pool(self, work_pool_name: str) -> WorkPool:
2616
2590
  """
@@ -2625,7 +2599,7 @@ class PrefectClient:
2625
2599
  """
2626
2600
  try:
2627
2601
  response = await self._client.get(f"/work_pools/{work_pool_name}")
2628
- return pydantic.parse_obj_as(WorkPool, response.json())
2602
+ return WorkPool.model_validate(response.json())
2629
2603
  except httpx.HTTPStatusError as e:
2630
2604
  if e.response.status_code == status.HTTP_404_NOT_FOUND:
2631
2605
  raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
@@ -2654,13 +2628,11 @@ class PrefectClient:
2654
2628
  "limit": limit,
2655
2629
  "offset": offset,
2656
2630
  "work_pools": (
2657
- work_pool_filter.dict(json_compatible=True)
2658
- if work_pool_filter
2659
- else None
2631
+ work_pool_filter.model_dump(mode="json") if work_pool_filter else None
2660
2632
  ),
2661
2633
  }
2662
2634
  response = await self._client.post("/work_pools/filter", json=body)
2663
- return pydantic.parse_obj_as(List[WorkPool], response.json())
2635
+ return pydantic.TypeAdapter(List[WorkPool]).validate_python(response.json())
2664
2636
 
2665
2637
  async def create_work_pool(
2666
2638
  self,
@@ -2678,7 +2650,7 @@ class PrefectClient:
2678
2650
  try:
2679
2651
  response = await self._client.post(
2680
2652
  "/work_pools/",
2681
- json=work_pool.dict(json_compatible=True, exclude_unset=True),
2653
+ json=work_pool.model_dump(mode="json", exclude_unset=True),
2682
2654
  )
2683
2655
  except httpx.HTTPStatusError as e:
2684
2656
  if e.response.status_code == status.HTTP_409_CONFLICT:
@@ -2686,7 +2658,7 @@ class PrefectClient:
2686
2658
  else:
2687
2659
  raise
2688
2660
 
2689
- return pydantic.parse_obj_as(WorkPool, response.json())
2661
+ return WorkPool.model_validate(response.json())
2690
2662
 
2691
2663
  async def update_work_pool(
2692
2664
  self,
@@ -2703,7 +2675,7 @@ class PrefectClient:
2703
2675
  try:
2704
2676
  await self._client.patch(
2705
2677
  f"/work_pools/{work_pool_name}",
2706
- json=work_pool.dict(json_compatible=True, exclude_unset=True),
2678
+ json=work_pool.model_dump(mode="json", exclude_unset=True),
2707
2679
  )
2708
2680
  except httpx.HTTPStatusError as e:
2709
2681
  if e.response.status_code == status.HTTP_404_NOT_FOUND:
@@ -2750,7 +2722,7 @@ class PrefectClient:
2750
2722
  """
2751
2723
  json = {
2752
2724
  "work_queues": (
2753
- work_queue_filter.dict(json_compatible=True, exclude_unset=True)
2725
+ work_queue_filter.model_dump(mode="json", exclude_unset=True)
2754
2726
  if work_queue_filter
2755
2727
  else None
2756
2728
  ),
@@ -2772,14 +2744,14 @@ class PrefectClient:
2772
2744
  else:
2773
2745
  response = await self._client.post("/work_queues/filter", json=json)
2774
2746
 
2775
- return pydantic.parse_obj_as(List[WorkQueue], response.json())
2747
+ return pydantic.TypeAdapter(List[WorkQueue]).validate_python(response.json())
2776
2748
 
2777
2749
  async def get_scheduled_flow_runs_for_deployments(
2778
2750
  self,
2779
2751
  deployment_ids: List[UUID],
2780
2752
  scheduled_before: Optional[datetime.datetime] = None,
2781
2753
  limit: Optional[int] = None,
2782
- ):
2754
+ ) -> List[FlowRunResponse]:
2783
2755
  body: Dict[str, Any] = dict(deployment_ids=[str(id) for id in deployment_ids])
2784
2756
  if scheduled_before:
2785
2757
  body["scheduled_before"] = str(scheduled_before)
@@ -2791,7 +2763,9 @@ class PrefectClient:
2791
2763
  json=body,
2792
2764
  )
2793
2765
 
2794
- return pydantic.parse_obj_as(List[FlowRunResponse], response.json())
2766
+ return pydantic.TypeAdapter(List[FlowRunResponse]).validate_python(
2767
+ response.json()
2768
+ )
2795
2769
 
2796
2770
  async def get_scheduled_flow_runs_for_work_pool(
2797
2771
  self,
@@ -2824,7 +2798,9 @@ class PrefectClient:
2824
2798
  f"/work_pools/{work_pool_name}/get_scheduled_flow_runs",
2825
2799
  json=body,
2826
2800
  )
2827
- return pydantic.parse_obj_as(List[WorkerFlowRunResponse], response.json())
2801
+ return pydantic.TypeAdapter(List[WorkerFlowRunResponse]).validate_python(
2802
+ response.json()
2803
+ )
2828
2804
 
2829
2805
  async def create_artifact(
2830
2806
  self,
@@ -2841,10 +2817,29 @@ class PrefectClient:
2841
2817
 
2842
2818
  response = await self._client.post(
2843
2819
  "/artifacts/",
2844
- json=artifact.dict(json_compatible=True, exclude_unset=True),
2820
+ json=artifact.model_dump(mode="json", exclude_unset=True),
2845
2821
  )
2846
2822
 
2847
- return pydantic.parse_obj_as(Artifact, response.json())
2823
+ return Artifact.model_validate(response.json())
2824
+
2825
+ async def update_artifact(
2826
+ self,
2827
+ artifact_id: UUID,
2828
+ artifact: ArtifactUpdate,
2829
+ ) -> None:
2830
+ """
2831
+ Updates an artifact
2832
+
2833
+ Args:
2834
+ artifact: Desired values for the updated artifact.
2835
+ Returns:
2836
+ Information about the updated artifact.
2837
+ """
2838
+
2839
+ await self._client.patch(
2840
+ f"/artifacts/{artifact_id}",
2841
+ json=artifact.model_dump(mode="json", exclude_unset=True),
2842
+ )
2848
2843
 
2849
2844
  async def read_artifacts(
2850
2845
  self,
@@ -2871,20 +2866,20 @@ class PrefectClient:
2871
2866
  """
2872
2867
  body = {
2873
2868
  "artifacts": (
2874
- artifact_filter.dict(json_compatible=True) if artifact_filter else None
2869
+ artifact_filter.model_dump(mode="json") if artifact_filter else None
2875
2870
  ),
2876
2871
  "flow_runs": (
2877
- flow_run_filter.dict(json_compatible=True) if flow_run_filter else None
2872
+ flow_run_filter.model_dump(mode="json") if flow_run_filter else None
2878
2873
  ),
2879
2874
  "task_runs": (
2880
- task_run_filter.dict(json_compatible=True) if task_run_filter else None
2875
+ task_run_filter.model_dump(mode="json") if task_run_filter else None
2881
2876
  ),
2882
2877
  "sort": sort,
2883
2878
  "limit": limit,
2884
2879
  "offset": offset,
2885
2880
  }
2886
2881
  response = await self._client.post("/artifacts/filter", json=body)
2887
- return pydantic.parse_obj_as(List[Artifact], response.json())
2882
+ return pydantic.TypeAdapter(List[Artifact]).validate_python(response.json())
2888
2883
 
2889
2884
  async def read_latest_artifacts(
2890
2885
  self,
@@ -2911,20 +2906,22 @@ class PrefectClient:
2911
2906
  """
2912
2907
  body = {
2913
2908
  "artifacts": (
2914
- artifact_filter.dict(json_compatible=True) if artifact_filter else None
2909
+ artifact_filter.model_dump(mode="json") if artifact_filter else None
2915
2910
  ),
2916
2911
  "flow_runs": (
2917
- flow_run_filter.dict(json_compatible=True) if flow_run_filter else None
2912
+ flow_run_filter.model_dump(mode="json") if flow_run_filter else None
2918
2913
  ),
2919
2914
  "task_runs": (
2920
- task_run_filter.dict(json_compatible=True) if task_run_filter else None
2915
+ task_run_filter.model_dump(mode="json") if task_run_filter else None
2921
2916
  ),
2922
2917
  "sort": sort,
2923
2918
  "limit": limit,
2924
2919
  "offset": offset,
2925
2920
  }
2926
2921
  response = await self._client.post("/artifacts/latest/filter", json=body)
2927
- return pydantic.parse_obj_as(List[ArtifactCollection], response.json())
2922
+ return pydantic.TypeAdapter(List[ArtifactCollection]).validate_python(
2923
+ response.json()
2924
+ )
2928
2925
 
2929
2926
  async def delete_artifact(self, artifact_id: UUID) -> None:
2930
2927
  """
@@ -2952,7 +2949,7 @@ class PrefectClient:
2952
2949
  """
2953
2950
  response = await self._client.post(
2954
2951
  "/variables/",
2955
- json=variable.dict(json_compatible=True, exclude_unset=True),
2952
+ json=variable.model_dump(mode="json", exclude_unset=True),
2956
2953
  )
2957
2954
  return Variable(**response.json())
2958
2955
 
@@ -2967,7 +2964,7 @@ class PrefectClient:
2967
2964
  """
2968
2965
  await self._client.patch(
2969
2966
  f"/variables/name/{variable.name}",
2970
- json=variable.dict(json_compatible=True, exclude_unset=True),
2967
+ json=variable.model_dump(mode="json", exclude_unset=True),
2971
2968
  )
2972
2969
 
2973
2970
  async def read_variable_by_name(self, name: str) -> Optional[Variable]:
@@ -2994,7 +2991,7 @@ class PrefectClient:
2994
2991
  async def read_variables(self, limit: int = None) -> List[Variable]:
2995
2992
  """Reads all variables."""
2996
2993
  response = await self._client.post("/variables/filter", json={"limit": limit})
2997
- return pydantic.parse_obj_as(List[Variable], response.json())
2994
+ return pydantic.TypeAdapter(List[Variable]).validate_python(response.json())
2998
2995
 
2999
2996
  async def read_worker_metadata(self) -> Dict[str, Any]:
3000
2997
  """Reads worker metadata stored in Prefect collection registry."""
@@ -3027,7 +3024,7 @@ class PrefectClient:
3027
3024
  ) -> UUID:
3028
3025
  response = await self._client.post(
3029
3026
  "/v2/concurrency_limits/",
3030
- json=concurrency_limit.dict(json_compatible=True, exclude_unset=True),
3027
+ json=concurrency_limit.model_dump(mode="json", exclude_unset=True),
3031
3028
  )
3032
3029
  return UUID(response.json()["id"])
3033
3030
 
@@ -3037,7 +3034,7 @@ class PrefectClient:
3037
3034
  try:
3038
3035
  response = await self._client.patch(
3039
3036
  f"/v2/concurrency_limits/{name}",
3040
- json=concurrency_limit.dict(json_compatible=True, exclude_unset=True),
3037
+ json=concurrency_limit.model_dump(mode="json", exclude_unset=True),
3041
3038
  )
3042
3039
  return response
3043
3040
  except httpx.HTTPStatusError as e:
@@ -3063,7 +3060,7 @@ class PrefectClient:
3063
3060
  ) -> GlobalConcurrencyLimitResponse:
3064
3061
  try:
3065
3062
  response = await self._client.get(f"/v2/concurrency_limits/{name}")
3066
- return GlobalConcurrencyLimitResponse.parse_obj(response.json())
3063
+ return GlobalConcurrencyLimitResponse.model_validate(response.json())
3067
3064
  except httpx.HTTPStatusError as e:
3068
3065
  if e.response.status_code == status.HTTP_404_NOT_FOUND:
3069
3066
  raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
@@ -3080,9 +3077,9 @@ class PrefectClient:
3080
3077
  "offset": offset,
3081
3078
  },
3082
3079
  )
3083
- return pydantic.parse_obj_as(
3084
- List[GlobalConcurrencyLimitResponse], response.json()
3085
- )
3080
+ return pydantic.TypeAdapter(
3081
+ List[GlobalConcurrencyLimitResponse]
3082
+ ).validate_python(response.json())
3086
3083
 
3087
3084
  async def create_flow_run_input(
3088
3085
  self, flow_run_id: UUID, key: str, value: str, sender: Optional[str] = None
@@ -3118,7 +3115,7 @@ class PrefectClient:
3118
3115
  },
3119
3116
  )
3120
3117
  response.raise_for_status()
3121
- return pydantic.parse_obj_as(List[FlowRunInput], response.json())
3118
+ return pydantic.TypeAdapter(List[FlowRunInput]).validate_python(response.json())
3122
3119
 
3123
3120
  async def read_flow_run_input(self, flow_run_id: UUID, key: str) -> str:
3124
3121
  """
@@ -3143,49 +3140,27 @@ class PrefectClient:
3143
3140
  response = await self._client.delete(f"/flow_runs/{flow_run_id}/input/{key}")
3144
3141
  response.raise_for_status()
3145
3142
 
3146
- def _raise_for_unsupported_automations(self) -> NoReturn:
3147
- if not PREFECT_EXPERIMENTAL_EVENTS:
3148
- raise RuntimeError(
3149
- "The current server and client configuration does not support "
3150
- "events. Enable experimental events support with the "
3151
- "PREFECT_EXPERIMENTAL_EVENTS setting."
3152
- )
3153
- else:
3154
- raise RuntimeError(
3155
- "The current server and client configuration does not support "
3156
- "automations. Enable experimental automations with the "
3157
- "PREFECT_API_SERVICES_TRIGGERS_ENABLED setting."
3158
- )
3159
-
3160
3143
  async def create_automation(self, automation: AutomationCore) -> UUID:
3161
3144
  """Creates an automation in Prefect Cloud."""
3162
- if not self.server_type.supports_automations():
3163
- self._raise_for_unsupported_automations()
3164
-
3165
3145
  response = await self._client.post(
3166
3146
  "/automations/",
3167
- json=automation.dict(json_compatible=True),
3147
+ json=automation.model_dump(mode="json"),
3168
3148
  )
3169
3149
 
3170
3150
  return UUID(response.json()["id"])
3171
3151
 
3172
3152
  async def update_automation(self, automation_id: UUID, automation: AutomationCore):
3173
3153
  """Updates an automation in Prefect Cloud."""
3174
- if not self.server_type.supports_automations():
3175
- self._raise_for_unsupported_automations()
3176
3154
  response = await self._client.put(
3177
3155
  f"/automations/{automation_id}",
3178
- json=automation.dict(json_compatible=True, exclude_unset=True),
3156
+ json=automation.model_dump(mode="json", exclude_unset=True),
3179
3157
  )
3180
3158
  response.raise_for_status
3181
3159
 
3182
3160
  async def read_automations(self) -> List[Automation]:
3183
- if not self.server_type.supports_automations():
3184
- self._raise_for_unsupported_automations()
3185
-
3186
3161
  response = await self._client.post("/automations/filter")
3187
3162
  response.raise_for_status()
3188
- return pydantic.parse_obj_as(List[Automation], response.json())
3163
+ return pydantic.TypeAdapter(List[Automation]).validate_python(response.json())
3189
3164
 
3190
3165
  async def find_automation(
3191
3166
  self, id_or_name: Union[str, UUID], exit_if_not_found: bool = True
@@ -3221,14 +3196,11 @@ class PrefectClient:
3221
3196
  return None
3222
3197
 
3223
3198
  async def read_automation(self, automation_id: UUID) -> Optional[Automation]:
3224
- if not self.server_type.supports_automations():
3225
- self._raise_for_unsupported_automations()
3226
-
3227
3199
  response = await self._client.get(f"/automations/{automation_id}")
3228
3200
  if response.status_code == 404:
3229
3201
  return None
3230
3202
  response.raise_for_status()
3231
- return Automation.parse_obj(response.json())
3203
+ return Automation.model_validate(response.json())
3232
3204
 
3233
3205
  async def read_automations_by_name(self, name: str) -> List[Automation]:
3234
3206
  """
@@ -3240,15 +3212,13 @@ class PrefectClient:
3240
3212
  Returns:
3241
3213
  a list of Automation model representations of the automations
3242
3214
  """
3243
- if not self.server_type.supports_automations():
3244
- self._raise_for_unsupported_automations()
3245
3215
  automation_filter = filters.AutomationFilter(name=dict(any_=[name]))
3246
3216
 
3247
3217
  response = await self._client.post(
3248
3218
  "/automations/filter",
3249
3219
  json={
3250
3220
  "sort": sorting.AutomationSort.UPDATED_DESC,
3251
- "automations": automation_filter.dict(json_compatible=True)
3221
+ "automations": automation_filter.model_dump(mode="json")
3252
3222
  if automation_filter
3253
3223
  else None,
3254
3224
  },
@@ -3256,30 +3226,21 @@ class PrefectClient:
3256
3226
 
3257
3227
  response.raise_for_status()
3258
3228
 
3259
- return pydantic.parse_obj_as(List[Automation], response.json())
3229
+ return pydantic.TypeAdapter(List[Automation]).validate_python(response.json())
3260
3230
 
3261
3231
  async def pause_automation(self, automation_id: UUID):
3262
- if not self.server_type.supports_automations():
3263
- self._raise_for_unsupported_automations()
3264
-
3265
3232
  response = await self._client.patch(
3266
3233
  f"/automations/{automation_id}", json={"enabled": False}
3267
3234
  )
3268
3235
  response.raise_for_status()
3269
3236
 
3270
3237
  async def resume_automation(self, automation_id: UUID):
3271
- if not self.server_type.supports_automations():
3272
- self._raise_for_unsupported_automations()
3273
-
3274
3238
  response = await self._client.patch(
3275
3239
  f"/automations/{automation_id}", json={"enabled": True}
3276
3240
  )
3277
3241
  response.raise_for_status()
3278
3242
 
3279
3243
  async def delete_automation(self, automation_id: UUID):
3280
- if not self.server_type.supports_automations():
3281
- self._raise_for_unsupported_automations()
3282
-
3283
3244
  response = await self._client.delete(f"/automations/{automation_id}")
3284
3245
  if response.status_code == 404:
3285
3246
  return
@@ -3289,17 +3250,11 @@ class PrefectClient:
3289
3250
  async def read_resource_related_automations(
3290
3251
  self, resource_id: str
3291
3252
  ) -> List[Automation]:
3292
- if not self.server_type.supports_automations():
3293
- self._raise_for_unsupported_automations()
3294
-
3295
3253
  response = await self._client.get(f"/automations/related-to/{resource_id}")
3296
3254
  response.raise_for_status()
3297
- return pydantic.parse_obj_as(List[Automation], response.json())
3255
+ return pydantic.TypeAdapter(List[Automation]).validate_python(response.json())
3298
3256
 
3299
3257
  async def delete_resource_owned_automations(self, resource_id: str):
3300
- if not self.server_type.supports_automations():
3301
- self._raise_for_unsupported_automations()
3302
-
3303
3258
  await self._client.delete(f"/automations/owned-by/{resource_id}")
3304
3259
 
3305
3260
  async def __aenter__(self):
@@ -3318,9 +3273,11 @@ class PrefectClient:
3318
3273
  "Retrieve a new client with `get_client()` instead."
3319
3274
  )
3320
3275
 
3276
+ self._context_stack += 1
3277
+
3321
3278
  if self._started:
3322
- # httpx.AsyncClient does not allow reentrancy so we will not either.
3323
- raise RuntimeError("The client cannot be started more than once.")
3279
+ # allow reentrancy
3280
+ return self
3324
3281
 
3325
3282
  self._loop = asyncio.get_running_loop()
3326
3283
  await self._exit_stack.__aenter__()
@@ -3351,6 +3308,10 @@ class PrefectClient:
3351
3308
  """
3352
3309
  Shutdown the client.
3353
3310
  """
3311
+
3312
+ self._context_stack -= 1
3313
+ if self._context_stack > 0:
3314
+ return
3354
3315
  self._closed = True
3355
3316
  return await self._exit_stack.__aexit__(*exc_info)
3356
3317
 
@@ -3416,6 +3377,7 @@ class SyncPrefectClient:
3416
3377
  httpx_settings["headers"].setdefault("Authorization", f"Bearer {api_key}")
3417
3378
 
3418
3379
  # Context management
3380
+ self._context_stack: int = 0
3419
3381
  self._ephemeral_app: Optional[ASGIApp] = None
3420
3382
  self.manage_lifespan = True
3421
3383
  self.server_type: ServerType
@@ -3547,9 +3509,12 @@ class SyncPrefectClient:
3547
3509
  "Retrieve a new client with `get_client()` instead."
3548
3510
  )
3549
3511
 
3512
+ self._context_stack += 1
3513
+
3550
3514
  if self._started:
3551
- # httpx.Client does not allow reentrancy so we will not either.
3552
- raise RuntimeError("The client cannot be started more than once.")
3515
+ # allow reentrancy
3516
+ return self
3517
+
3553
3518
  self._client.__enter__()
3554
3519
  self._started = True
3555
3520
 
@@ -3559,6 +3524,9 @@ class SyncPrefectClient:
3559
3524
  """
3560
3525
  Shutdown the client.
3561
3526
  """
3527
+ self._context_stack -= 1
3528
+ if self._context_stack > 0:
3529
+ return
3562
3530
  self._closed = True
3563
3531
  self._client.__exit__(*exc_info)
3564
3532
 
@@ -3612,9 +3580,7 @@ class SyncPrefectClient:
3612
3580
  the ID of the flow in the backend
3613
3581
  """
3614
3582
  flow_data = FlowCreate(name=flow_name)
3615
- response = self._client.post(
3616
- "/flows/", json=flow_data.dict(json_compatible=True)
3617
- )
3583
+ response = self._client.post("/flows/", json=flow_data.model_dump(mode="json"))
3618
3584
 
3619
3585
  flow_id = response.json().get("id")
3620
3586
  if not flow_id:
@@ -3677,9 +3643,9 @@ class SyncPrefectClient:
3677
3643
  ),
3678
3644
  )
3679
3645
 
3680
- flow_run_create_json = flow_run_create.dict(json_compatible=True)
3646
+ flow_run_create_json = flow_run_create.model_dump(mode="json")
3681
3647
  response = self._client.post("/flow_runs/", json=flow_run_create_json)
3682
- flow_run = FlowRun.parse_obj(response.json())
3648
+ flow_run = FlowRun.model_validate(response.json())
3683
3649
 
3684
3650
  # Restore the parameters to the local objects to retain expectations about
3685
3651
  # Python objects
@@ -3704,7 +3670,7 @@ class SyncPrefectClient:
3704
3670
  raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
3705
3671
  else:
3706
3672
  raise
3707
- return FlowRun.parse_obj(response.json())
3673
+ return FlowRun.model_validate(response.json())
3708
3674
 
3709
3675
  def read_flow_runs(
3710
3676
  self,
@@ -3739,29 +3705,23 @@ class SyncPrefectClient:
3739
3705
  of the flow runs
3740
3706
  """
3741
3707
  body = {
3742
- "flows": flow_filter.dict(json_compatible=True) if flow_filter else None,
3708
+ "flows": flow_filter.model_dump(mode="json") if flow_filter else None,
3743
3709
  "flow_runs": (
3744
- flow_run_filter.dict(json_compatible=True, exclude_unset=True)
3710
+ flow_run_filter.model_dump(mode="json", exclude_unset=True)
3745
3711
  if flow_run_filter
3746
3712
  else None
3747
3713
  ),
3748
3714
  "task_runs": (
3749
- task_run_filter.dict(json_compatible=True) if task_run_filter else None
3715
+ task_run_filter.model_dump(mode="json") if task_run_filter else None
3750
3716
  ),
3751
3717
  "deployments": (
3752
- deployment_filter.dict(json_compatible=True)
3753
- if deployment_filter
3754
- else None
3718
+ deployment_filter.model_dump(mode="json") if deployment_filter else None
3755
3719
  ),
3756
3720
  "work_pools": (
3757
- work_pool_filter.dict(json_compatible=True)
3758
- if work_pool_filter
3759
- else None
3721
+ work_pool_filter.model_dump(mode="json") if work_pool_filter else None
3760
3722
  ),
3761
3723
  "work_pool_queues": (
3762
- work_queue_filter.dict(json_compatible=True)
3763
- if work_queue_filter
3764
- else None
3724
+ work_queue_filter.model_dump(mode="json") if work_queue_filter else None
3765
3725
  ),
3766
3726
  "sort": sort,
3767
3727
  "limit": limit,
@@ -3769,7 +3729,7 @@ class SyncPrefectClient:
3769
3729
  }
3770
3730
 
3771
3731
  response = self._client.post("/flow_runs/filter", json=body)
3772
- return pydantic.parse_obj_as(List[FlowRun], response.json())
3732
+ return pydantic.TypeAdapter(List[FlowRun]).validate_python(response.json())
3773
3733
 
3774
3734
  def set_flow_run_state(
3775
3735
  self,
@@ -3795,7 +3755,7 @@ class SyncPrefectClient:
3795
3755
  try:
3796
3756
  response = self._client.post(
3797
3757
  f"/flow_runs/{flow_run_id}/set_state",
3798
- json=dict(state=state_create.dict(json_compatible=True), force=force),
3758
+ json=dict(state=state_create.model_dump(mode="json"), force=force),
3799
3759
  )
3800
3760
  except httpx.HTTPStatusError as e:
3801
3761
  if e.response.status_code == status.HTTP_404_NOT_FOUND:
@@ -3803,13 +3763,28 @@ class SyncPrefectClient:
3803
3763
  else:
3804
3764
  raise
3805
3765
 
3806
- return OrchestrationResult.parse_obj(response.json())
3766
+ return OrchestrationResult.model_validate(response.json())
3767
+
3768
+ def set_flow_run_name(self, flow_run_id: UUID, name: str):
3769
+ flow_run_data = TaskRunUpdate(name=name)
3770
+ return self._client.patch(
3771
+ f"/flow_runs/{flow_run_id}",
3772
+ json=flow_run_data.model_dump(mode="json", exclude_unset=True),
3773
+ )
3774
+
3775
+ def set_task_run_name(self, task_run_id: UUID, name: str):
3776
+ task_run_data = TaskRunUpdate(name=name)
3777
+ return self._client.patch(
3778
+ f"/task_runs/{task_run_id}",
3779
+ json=task_run_data.model_dump(mode="json", exclude_unset=True),
3780
+ )
3807
3781
 
3808
3782
  def create_task_run(
3809
3783
  self,
3810
3784
  task: "TaskObject[P, R]",
3811
3785
  flow_run_id: Optional[UUID],
3812
3786
  dynamic_key: str,
3787
+ id: Optional[UUID] = None,
3813
3788
  name: Optional[str] = None,
3814
3789
  extra_tags: Optional[Iterable[str]] = None,
3815
3790
  state: Optional[prefect.states.State[R]] = None,
@@ -3833,6 +3808,8 @@ class SyncPrefectClient:
3833
3808
  task: The Task to run
3834
3809
  flow_run_id: The flow run id with which to associate the task run
3835
3810
  dynamic_key: A key unique to this particular run of a Task within the flow
3811
+ id: An optional ID for the task run. If not provided, one will be generated
3812
+ server-side.
3836
3813
  name: An optional name for the task run
3837
3814
  extra_tags: an optional list of extra tags to apply to the task run in
3838
3815
  addition to `task.tags`
@@ -3849,6 +3826,7 @@ class SyncPrefectClient:
3849
3826
  state = prefect.states.Pending()
3850
3827
 
3851
3828
  task_run_data = TaskRunCreate(
3829
+ id=id,
3852
3830
  name=name,
3853
3831
  flow_run_id=flow_run_id,
3854
3832
  task_key=task.task_key,
@@ -3864,10 +3842,10 @@ class SyncPrefectClient:
3864
3842
  task_inputs=task_inputs or {},
3865
3843
  )
3866
3844
 
3867
- response = self._client.post(
3868
- "/task_runs/", json=task_run_data.dict(json_compatible=True)
3869
- )
3870
- return TaskRun.parse_obj(response.json())
3845
+ content = task_run_data.model_dump_json(exclude={"id"} if id is None else None)
3846
+
3847
+ response = self._client.post("/task_runs/", content=content)
3848
+ return TaskRun.model_validate(response.json())
3871
3849
 
3872
3850
  def read_task_run(self, task_run_id: UUID) -> TaskRun:
3873
3851
  """
@@ -3879,8 +3857,14 @@ class SyncPrefectClient:
3879
3857
  Returns:
3880
3858
  a Task Run model representation of the task run
3881
3859
  """
3882
- response = self._client.get(f"/task_runs/{task_run_id}")
3883
- return TaskRun.parse_obj(response.json())
3860
+ try:
3861
+ response = self._client.get(f"/task_runs/{task_run_id}")
3862
+ return TaskRun.model_validate(response.json())
3863
+ except httpx.HTTPStatusError as e:
3864
+ if e.response.status_code == status.HTTP_404_NOT_FOUND:
3865
+ raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
3866
+ else:
3867
+ raise
3884
3868
 
3885
3869
  def read_task_runs(
3886
3870
  self,
@@ -3911,26 +3895,24 @@ class SyncPrefectClient:
3911
3895
  of the task runs
3912
3896
  """
3913
3897
  body = {
3914
- "flows": flow_filter.dict(json_compatible=True) if flow_filter else None,
3898
+ "flows": flow_filter.model_dump(mode="json") if flow_filter else None,
3915
3899
  "flow_runs": (
3916
- flow_run_filter.dict(json_compatible=True, exclude_unset=True)
3900
+ flow_run_filter.model_dump(mode="json", exclude_unset=True)
3917
3901
  if flow_run_filter
3918
3902
  else None
3919
3903
  ),
3920
3904
  "task_runs": (
3921
- task_run_filter.dict(json_compatible=True) if task_run_filter else None
3905
+ task_run_filter.model_dump(mode="json") if task_run_filter else None
3922
3906
  ),
3923
3907
  "deployments": (
3924
- deployment_filter.dict(json_compatible=True)
3925
- if deployment_filter
3926
- else None
3908
+ deployment_filter.model_dump(mode="json") if deployment_filter else None
3927
3909
  ),
3928
3910
  "sort": sort,
3929
3911
  "limit": limit,
3930
3912
  "offset": offset,
3931
3913
  }
3932
3914
  response = self._client.post("/task_runs/filter", json=body)
3933
- return pydantic.parse_obj_as(List[TaskRun], response.json())
3915
+ return pydantic.TypeAdapter(List[TaskRun]).validate_python(response.json())
3934
3916
 
3935
3917
  def set_task_run_state(
3936
3918
  self,
@@ -3954,9 +3936,9 @@ class SyncPrefectClient:
3954
3936
  state_create.state_details.task_run_id = task_run_id
3955
3937
  response = self._client.post(
3956
3938
  f"/task_runs/{task_run_id}/set_state",
3957
- json=dict(state=state_create.dict(json_compatible=True), force=force),
3939
+ json=dict(state=state_create.model_dump(mode="json"), force=force),
3958
3940
  )
3959
- return OrchestrationResult.parse_obj(response.json())
3941
+ return OrchestrationResult.model_validate(response.json())
3960
3942
 
3961
3943
  def read_task_run_states(self, task_run_id: UUID) -> List[prefect.states.State]:
3962
3944
  """
@@ -3971,4 +3953,28 @@ class SyncPrefectClient:
3971
3953
  response = self._client.get(
3972
3954
  "/task_run_states/", params=dict(task_run_id=str(task_run_id))
3973
3955
  )
3974
- return pydantic.parse_obj_as(List[prefect.states.State], response.json())
3956
+ return pydantic.TypeAdapter(List[prefect.states.State]).validate_python(
3957
+ response.json()
3958
+ )
3959
+
3960
+ def read_deployment(
3961
+ self,
3962
+ deployment_id: UUID,
3963
+ ) -> DeploymentResponse:
3964
+ """
3965
+ Query the Prefect API for a deployment by id.
3966
+
3967
+ Args:
3968
+ deployment_id: the deployment ID of interest
3969
+
3970
+ Returns:
3971
+ a [Deployment model][prefect.client.schemas.objects.Deployment] representation of the deployment
3972
+ """
3973
+ try:
3974
+ response = self._client.get(f"/deployments/{deployment_id}")
3975
+ except httpx.HTTPStatusError as e:
3976
+ if e.response.status_code == status.HTTP_404_NOT_FOUND:
3977
+ raise prefect.exceptions.ObjectNotFound(http_exc=e) from e
3978
+ else:
3979
+ raise
3980
+ return DeploymentResponse.model_validate(response.json())