prefect-client 3.2.1__py3-none-any.whl → 3.2.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. prefect/__init__.py +15 -8
  2. prefect/_build_info.py +5 -0
  3. prefect/_internal/schemas/bases.py +4 -7
  4. prefect/_internal/schemas/validators.py +5 -6
  5. prefect/_result_records.py +6 -1
  6. prefect/client/orchestration/__init__.py +18 -6
  7. prefect/client/schemas/schedules.py +2 -2
  8. prefect/concurrency/asyncio.py +4 -3
  9. prefect/concurrency/sync.py +3 -3
  10. prefect/concurrency/v1/asyncio.py +3 -3
  11. prefect/concurrency/v1/sync.py +3 -3
  12. prefect/deployments/flow_runs.py +2 -2
  13. prefect/docker/docker_image.py +2 -3
  14. prefect/engine.py +1 -1
  15. prefect/events/clients.py +4 -3
  16. prefect/events/related.py +3 -5
  17. prefect/flows.py +11 -5
  18. prefect/locking/filesystem.py +8 -8
  19. prefect/logging/handlers.py +7 -11
  20. prefect/main.py +0 -2
  21. prefect/runtime/flow_run.py +10 -17
  22. prefect/server/api/__init__.py +34 -0
  23. prefect/server/api/admin.py +85 -0
  24. prefect/server/api/artifacts.py +224 -0
  25. prefect/server/api/automations.py +239 -0
  26. prefect/server/api/block_capabilities.py +25 -0
  27. prefect/server/api/block_documents.py +164 -0
  28. prefect/server/api/block_schemas.py +153 -0
  29. prefect/server/api/block_types.py +211 -0
  30. prefect/server/api/clients.py +246 -0
  31. prefect/server/api/collections.py +75 -0
  32. prefect/server/api/concurrency_limits.py +286 -0
  33. prefect/server/api/concurrency_limits_v2.py +269 -0
  34. prefect/server/api/csrf_token.py +38 -0
  35. prefect/server/api/dependencies.py +196 -0
  36. prefect/server/api/deployments.py +941 -0
  37. prefect/server/api/events.py +300 -0
  38. prefect/server/api/flow_run_notification_policies.py +120 -0
  39. prefect/server/api/flow_run_states.py +52 -0
  40. prefect/server/api/flow_runs.py +867 -0
  41. prefect/server/api/flows.py +210 -0
  42. prefect/server/api/logs.py +43 -0
  43. prefect/server/api/middleware.py +73 -0
  44. prefect/server/api/root.py +35 -0
  45. prefect/server/api/run_history.py +170 -0
  46. prefect/server/api/saved_searches.py +99 -0
  47. prefect/server/api/server.py +891 -0
  48. prefect/server/api/task_run_states.py +52 -0
  49. prefect/server/api/task_runs.py +342 -0
  50. prefect/server/api/task_workers.py +31 -0
  51. prefect/server/api/templates.py +35 -0
  52. prefect/server/api/ui/__init__.py +3 -0
  53. prefect/server/api/ui/flow_runs.py +128 -0
  54. prefect/server/api/ui/flows.py +173 -0
  55. prefect/server/api/ui/schemas.py +63 -0
  56. prefect/server/api/ui/task_runs.py +175 -0
  57. prefect/server/api/validation.py +382 -0
  58. prefect/server/api/variables.py +181 -0
  59. prefect/server/api/work_queues.py +230 -0
  60. prefect/server/api/workers.py +656 -0
  61. prefect/settings/sources.py +18 -5
  62. prefect/states.py +3 -3
  63. prefect/task_engine.py +3 -3
  64. prefect/types/_datetime.py +82 -3
  65. prefect/utilities/dockerutils.py +2 -2
  66. prefect/workers/base.py +5 -5
  67. {prefect_client-3.2.1.dist-info → prefect_client-3.2.3.dist-info}/METADATA +10 -15
  68. {prefect_client-3.2.1.dist-info → prefect_client-3.2.3.dist-info}/RECORD +70 -32
  69. {prefect_client-3.2.1.dist-info → prefect_client-3.2.3.dist-info}/WHEEL +1 -2
  70. prefect/_version.py +0 -21
  71. prefect_client-3.2.1.dist-info/top_level.txt +0 -1
  72. {prefect_client-3.2.1.dist-info → prefect_client-3.2.3.dist-info/licenses}/LICENSE +0 -0
@@ -0,0 +1,941 @@
1
+ """
2
+ Routes for interacting with Deployment objects.
3
+ """
4
+
5
+ import datetime
6
+ from typing import List, Optional
7
+ from uuid import UUID
8
+
9
+ import jsonschema.exceptions
10
+ import sqlalchemy as sa
11
+ from fastapi import Body, Depends, HTTPException, Path, Response, status
12
+ from starlette.background import BackgroundTasks
13
+
14
+ import prefect.server.api.dependencies as dependencies
15
+ import prefect.server.models as models
16
+ import prefect.server.schemas as schemas
17
+ from prefect.server.api.validation import (
18
+ validate_job_variables_for_deployment,
19
+ validate_job_variables_for_deployment_flow_run,
20
+ )
21
+ from prefect.server.api.workers import WorkerLookups
22
+ from prefect.server.database import PrefectDBInterface, provide_database_interface
23
+ from prefect.server.exceptions import MissingVariableError, ObjectNotFoundError
24
+ from prefect.server.models.deployments import mark_deployments_ready
25
+ from prefect.server.models.workers import DEFAULT_AGENT_WORK_POOL_NAME
26
+ from prefect.server.schemas.responses import DeploymentPaginationResponse
27
+ from prefect.server.utilities.server import PrefectRouter
28
+ from prefect.types import DateTime
29
+ from prefect.types._datetime import now
30
+ from prefect.utilities.schema_tools.hydration import (
31
+ HydrationContext,
32
+ HydrationError,
33
+ hydrate,
34
+ )
35
+ from prefect.utilities.schema_tools.validation import (
36
+ CircularSchemaRefError,
37
+ ValidationError,
38
+ validate,
39
+ )
40
+
41
+ router: PrefectRouter = PrefectRouter(prefix="/deployments", tags=["Deployments"])
42
+
43
+
44
def _multiple_schedules_error(deployment_id: UUID) -> HTTPException:
    """Build a 422 ``HTTPException`` for deployments with multiple schedules.

    Older clients cannot safely modify a deployment that has more than one
    schedule, so callers raise this to refuse the update.

    Args:
        deployment_id: the id of the deployment that has multiple schedules.

    Returns:
        An ``HTTPException`` ready to be raised by the caller.
    """
    return HTTPException(
        status.HTTP_422_UNPROCESSABLE_ENTITY,
        # BUG FIX: the original had a trailing comma after the string, which
        # made `detail` a one-element tuple instead of a plain string.
        detail=(
            "Error updating deployment: "
            f"Deployment {deployment_id!r} has multiple schedules. "
            "Please use the UI or update your client to adjust this "
            "deployment's schedules."
        ),
    )
54
+
55
+
56
@router.post("/")
async def create_deployment(
    deployment: schemas.actions.DeploymentCreate,
    response: Response,
    worker_lookups: WorkerLookups = Depends(WorkerLookups),
    created_by: Optional[schemas.core.CreatedBy] = Depends(dependencies.get_created_by),
    updated_by: Optional[schemas.core.UpdatedBy] = Depends(dependencies.get_updated_by),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> schemas.responses.DeploymentResponse:
    """
    Gracefully creates a new deployment from the provided schema. If a deployment with
    the same name and flow_id already exists, the deployment is updated.

    If the deployment has an active schedule, flow runs will be scheduled.
    When upserting, any scheduled runs from the existing deployment will be deleted.

    Sets a 201 status code when the record was newly created (detected by
    comparing the model's `created` timestamp to the request time), otherwise
    the default 200 is returned.
    """

    data = deployment.model_dump(exclude_unset=True)
    data["created_by"] = created_by.model_dump() if created_by else None
    # BUG FIX: this previously guarded on `created_by` instead of `updated_by`.
    data["updated_by"] = updated_by.model_dump() if updated_by else None
    # NOTE(review): `data` does not appear to be read below — confirm whether
    # created_by/updated_by were meant to be merged into `deployment_dict`.

    async with db.session_context(begin_transaction=True) as session:
        if (
            deployment.work_pool_name
            and deployment.work_pool_name != DEFAULT_AGENT_WORK_POOL_NAME
        ):
            # Make sure that deployment is valid before beginning creation process
            work_pool = await models.workers.read_work_pool_by_name(
                session=session, work_pool_name=deployment.work_pool_name
            )
            if work_pool is None:
                raise HTTPException(
                    status_code=status.HTTP_404_NOT_FOUND,
                    detail=f'Work pool "{deployment.work_pool_name}" not found.',
                )

            await validate_job_variables_for_deployment(
                session,
                work_pool,
                deployment,
            )

        # hydrate the input model into a full model
        deployment_dict = deployment.model_dump(
            exclude={"work_pool_name"}, exclude_unset=True
        )
        if deployment.work_pool_name and deployment.work_queue_name:
            # If a specific pool name/queue name combination was provided, get the
            # ID for that work pool queue.
            deployment_dict["work_queue_id"] = (
                await worker_lookups._get_work_queue_id_from_name(
                    session=session,
                    work_pool_name=deployment.work_pool_name,
                    work_queue_name=deployment.work_queue_name,
                    create_queue_if_not_found=True,
                )
            )
        elif deployment.work_pool_name:
            # If just a pool name was provided, get the ID for its default
            # work pool queue.
            deployment_dict["work_queue_id"] = (
                await worker_lookups._get_default_work_queue_id_from_work_pool_name(
                    session=session,
                    work_pool_name=deployment.work_pool_name,
                )
            )
        elif deployment.work_queue_name:
            # If just a queue name was provided, ensure that the queue exists and
            # get its ID.
            work_queue = await models.work_queues.ensure_work_queue_exists(
                session=session, name=deployment.work_queue_name
            )
            deployment_dict["work_queue_id"] = work_queue.id

        deployment = schemas.core.Deployment(**deployment_dict)
        # check to see if relevant blocks exist, allowing us to throw a useful error
        # message for debugging
        if deployment.infrastructure_document_id is not None:
            infrastructure_block = (
                await models.block_documents.read_block_document_by_id(
                    session=session,
                    block_document_id=deployment.infrastructure_document_id,
                )
            )
            if not infrastructure_block:
                raise HTTPException(
                    status_code=status.HTTP_409_CONFLICT,
                    detail=(
                        "Error creating deployment. Could not find infrastructure"
                        f" block with id: {deployment.infrastructure_document_id}. This"
                        " usually occurs when applying a deployment specification that"
                        " was built against a different Prefect database / workspace."
                    ),
                )

        if deployment.storage_document_id is not None:
            storage_block = await models.block_documents.read_block_document_by_id(
                session=session,
                block_document_id=deployment.storage_document_id,
            )
            if not storage_block:
                raise HTTPException(
                    status_code=status.HTTP_409_CONFLICT,
                    detail=(
                        "Error creating deployment. Could not find storage block with"
                        f" id: {deployment.storage_document_id}. This usually occurs"
                        " when applying a deployment specification that was built"
                        " against a different Prefect database / workspace."
                    ),
                )

        right_now = now("UTC")
        model = await models.deployments.create_deployment(
            session=session, deployment=deployment
        )

        # a `created` timestamp at-or-after the request time means this call
        # created the record (rather than upserting an existing one)
        if model.created >= right_now:
            response.status_code = status.HTTP_201_CREATED

        return schemas.responses.DeploymentResponse.model_validate(
            model, from_attributes=True
        )
178
+
179
+
180
@router.patch("/{id}", status_code=status.HTTP_204_NO_CONTENT)
async def update_deployment(
    deployment: schemas.actions.DeploymentUpdate,
    deployment_id: UUID = Path(..., description="The deployment id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> None:
    """
    Update an existing deployment by id.

    Handles targeted (slug-based) schedule updates, work-pool job-variable
    validation, parameter hydration, and optional parameter-schema enforcement.

    Raises:
        404: if the deployment (or its work pool) does not exist
        400: if parameter hydration fails
        409: if job variables or parameters are invalid for the deployment
        422: if schedule slugs are inconsistent or the schema has circular refs
    """
    async with db.session_context(begin_transaction=True) as session:
        existing_deployment = await models.deployments.read_deployment(
            session=session, deployment_id=deployment_id
        )
        if not existing_deployment:
            raise HTTPException(
                status.HTTP_404_NOT_FOUND, detail="Deployment not found."
            )

        # Checking how we should handle schedule updates
        # If not all existing schedules have slugs then we'll fall back to the existing logic where all schedules are recreated to match the request.
        # If the existing schedules have slugs, but not all provided schedules have slugs, then we'll return a 422 to avoid accidentally blowing away schedules.
        # Otherwise, we'll use the existing slugs and the provided slugs to make targeted updates to the deployment's schedules.
        schedules_to_patch: list[schemas.actions.DeploymentScheduleUpdate] = []
        schedules_to_create: list[schemas.actions.DeploymentScheduleUpdate] = []
        all_provided_have_slugs = all(
            schedule.slug is not None for schedule in deployment.schedules or []
        )
        all_existing_have_slugs = existing_deployment.schedules and all(
            schedule.slug is not None for schedule in existing_deployment.schedules
        )
        if all_provided_have_slugs and all_existing_have_slugs:
            current_slugs = [
                schedule.slug for schedule in existing_deployment.schedules
            ]

            for schedule in deployment.schedules:
                if schedule.slug in current_slugs:
                    schedules_to_patch.append(schedule)
                elif schedule.schedule:
                    schedules_to_create.append(schedule)
                else:
                    raise HTTPException(
                        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                        detail="Unable to create new deployment schedules without a schedule configuration.",
                    )
            # Clear schedules to handle their update/creation separately
            deployment.schedules = None
        elif not all_provided_have_slugs and all_existing_have_slugs:
            raise HTTPException(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                detail="Please provide a slug for each schedule in your request to ensure schedules are updated correctly.",
            )

        if deployment.work_pool_name:
            # Make sure that deployment is valid before beginning creation process
            work_pool = await models.workers.read_work_pool_by_name(
                session=session, work_pool_name=deployment.work_pool_name
            )
            # ROBUSTNESS FIX: previously a missing work pool fell through to
            # `work_pool.base_job_template` and produced an AttributeError
            # (HTTP 500); return a 404 instead, mirroring create_deployment.
            if work_pool is None:
                raise HTTPException(
                    status_code=status.HTTP_404_NOT_FOUND,
                    detail=f'Work pool "{deployment.work_pool_name}" not found.',
                )
            try:
                deployment.check_valid_configuration(work_pool.base_job_template)
            except (MissingVariableError, jsonschema.exceptions.ValidationError) as exc:
                raise HTTPException(
                    status_code=status.HTTP_409_CONFLICT,
                    detail=f"Error creating deployment: {exc!r}",
                )

        if deployment.parameters is not None:
            try:
                dehydrated_params = deployment.parameters
                ctx = await HydrationContext.build(
                    session=session,
                    raise_on_error=True,
                    render_jinja=True,
                    render_workspace_variables=True,
                )
                parameters = hydrate(dehydrated_params, ctx)
                deployment.parameters = parameters
            except HydrationError as exc:
                raise HTTPException(
                    status.HTTP_400_BAD_REQUEST,
                    detail=f"Error hydrating deployment parameters: {exc}",
                )
        else:
            # no new parameters supplied; validate the stored ones if needed
            parameters = existing_deployment.parameters

        enforce_parameter_schema = (
            deployment.enforce_parameter_schema
            if deployment.enforce_parameter_schema is not None
            else existing_deployment.enforce_parameter_schema
        )
        if enforce_parameter_schema:
            # ensure that the new parameters conform to the proposed schema
            if deployment.parameter_openapi_schema:
                openapi_schema = deployment.parameter_openapi_schema
            else:
                openapi_schema = existing_deployment.parameter_openapi_schema

            if not isinstance(openapi_schema, dict):
                raise HTTPException(
                    status.HTTP_409_CONFLICT,
                    detail=(
                        "Error updating deployment: Cannot update parameters because"
                        " parameter schema enforcement is enabled and the deployment"
                        " does not have a valid parameter schema."
                    ),
                )
            try:
                validate(
                    parameters,
                    openapi_schema,
                    raise_on_error=True,
                    ignore_required=True,
                )
            except ValidationError as exc:
                raise HTTPException(
                    status.HTTP_409_CONFLICT,
                    detail=f"Error updating deployment: {exc}",
                )
            except CircularSchemaRefError:
                raise HTTPException(
                    status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                    detail="Invalid schema: Unable to validate schema with circular references.",
                )

        result = await models.deployments.update_deployment(
            session=session, deployment_id=deployment_id, deployment=deployment
        )

        # apply the targeted schedule updates collected above
        for schedule in schedules_to_patch:
            await models.deployments.update_deployment_schedule(
                session=session,
                deployment_id=deployment_id,
                schedule=schedule,
                deployment_schedule_slug=schedule.slug,
            )
        if schedules_to_create:
            await models.deployments.create_deployment_schedules(
                session=session,
                deployment_id=deployment_id,
                schedules=[
                    schemas.actions.DeploymentScheduleCreate(
                        schedule=schedule.schedule,  # type: ignore We will raise above if schedule is not provided
                        active=schedule.active if schedule.active is not None else True,
                        slug=schedule.slug,
                        parameters=schedule.parameters,
                    )
                    for schedule in schedules_to_create
                ],
            )
    if not result:
        raise HTTPException(status.HTTP_404_NOT_FOUND, detail="Deployment not found.")
328
+
329
+
330
@router.get("/name/{flow_name}/{deployment_name}")
async def read_deployment_by_name(
    flow_name: str = Path(..., description="The name of the flow"),
    deployment_name: str = Path(..., description="The name of the deployment"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> schemas.responses.DeploymentResponse:
    """
    Look up a single deployment by the combination of its flow's name and its
    own name. Responds with 404 when no such deployment exists.
    """
    async with db.session_context() as session:
        orm_deployment = await models.deployments.read_deployment_by_name(
            session=session, name=deployment_name, flow_name=flow_name
        )
        if not orm_deployment:
            raise HTTPException(status.HTTP_404_NOT_FOUND, detail="Deployment not found")
        # convert the ORM record into the API response schema while the
        # session is still open
        return schemas.responses.DeploymentResponse.model_validate(
            orm_deployment, from_attributes=True
        )
350
+
351
+
352
@router.get("/{id}")
async def read_deployment(
    deployment_id: UUID = Path(..., description="The deployment id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> schemas.responses.DeploymentResponse:
    """
    Fetch a single deployment by its id, or respond with 404 if it is missing.
    """
    async with db.session_context() as session:
        orm_deployment = await models.deployments.read_deployment(
            session=session, deployment_id=deployment_id
        )
        if not orm_deployment:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="Deployment not found"
            )
        # serialize within the open session
        return schemas.responses.DeploymentResponse.model_validate(
            orm_deployment, from_attributes=True
        )
371
+
372
+
373
@router.post("/filter")
async def read_deployments(
    limit: int = dependencies.LimitBody(),
    offset: int = Body(0, ge=0),
    flows: schemas.filters.FlowFilter = None,
    flow_runs: schemas.filters.FlowRunFilter = None,
    task_runs: schemas.filters.TaskRunFilter = None,
    deployments: schemas.filters.DeploymentFilter = None,
    work_pools: schemas.filters.WorkPoolFilter = None,
    work_pool_queues: schemas.filters.WorkQueueFilter = None,
    sort: schemas.sorting.DeploymentSort = Body(
        schemas.sorting.DeploymentSort.NAME_ASC
    ),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> List[schemas.responses.DeploymentResponse]:
    """
    Query deployments, applying any combination of the supplied filters,
    and return the matching page as response schemas.
    """
    async with db.session_context() as session:
        orm_deployments = await models.deployments.read_deployments(
            session=session,
            limit=limit,
            offset=offset,
            sort=sort,
            flow_filter=flows,
            flow_run_filter=flow_runs,
            task_run_filter=task_runs,
            deployment_filter=deployments,
            work_pool_filter=work_pools,
            work_queue_filter=work_pool_queues,
        )
        # serialize while the session is still open
        return [
            schemas.responses.DeploymentResponse.model_validate(
                orm_deployment, from_attributes=True
            )
            for orm_deployment in orm_deployments
        ]
410
+
411
+
412
@router.post("/paginate")
async def paginate_deployments(
    limit: int = dependencies.LimitBody(),
    page: int = Body(1, ge=1),
    flows: schemas.filters.FlowFilter = None,
    flow_runs: schemas.filters.FlowRunFilter = None,
    task_runs: schemas.filters.TaskRunFilter = None,
    deployments: schemas.filters.DeploymentFilter = None,
    work_pools: schemas.filters.WorkPoolFilter = None,
    work_pool_queues: schemas.filters.WorkQueueFilter = None,
    sort: schemas.sorting.DeploymentSort = Body(
        schemas.sorting.DeploymentSort.NAME_ASC
    ),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> DeploymentPaginationResponse:
    """
    Pagination query for deployments.

    DOC FIX: the docstring previously said "flow runs"; this endpoint
    paginates deployments. Returns one page of results along with the total
    count and number of pages.
    """
    # translate the 1-based page number into a row offset
    offset = (page - 1) * limit

    async with db.session_context() as session:
        response = await models.deployments.read_deployments(
            session=session,
            offset=offset,
            sort=sort,
            limit=limit,
            flow_filter=flows,
            flow_run_filter=flow_runs,
            task_run_filter=task_runs,
            deployment_filter=deployments,
            work_pool_filter=work_pools,
            work_queue_filter=work_pool_queues,
        )

        count = await models.deployments.count_deployments(
            session=session,
            flow_filter=flows,
            flow_run_filter=flow_runs,
            task_run_filter=task_runs,
            deployment_filter=deployments,
            work_pool_filter=work_pools,
            work_queue_filter=work_pool_queues,
        )

        results = [
            schemas.responses.DeploymentResponse.model_validate(
                deployment, from_attributes=True
            )
            for deployment in response
        ]

        return DeploymentPaginationResponse(
            results=results,
            count=count,
            limit=limit,
            # ceiling division: number of pages needed to hold `count` rows
            pages=(count + limit - 1) // limit,
            page=page,
        )
470
+
471
+
472
@router.post("/get_scheduled_flow_runs")
async def get_scheduled_flow_runs_for_deployments(
    background_tasks: BackgroundTasks,
    deployment_ids: List[UUID] = Body(
        default=..., description="The deployment IDs to get scheduled runs for"
    ),
    scheduled_before: DateTime = Body(
        None, description="The maximum time to look for scheduled flow runs"
    ),
    limit: int = dependencies.LimitBody(),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> List[schemas.responses.FlowRunResponse]:
    """
    Get scheduled runs for a set of deployments. Used by a runner to poll for work.
    """
    # restrict to the requested deployments
    deployment_filter = schemas.filters.DeploymentFilter(
        id=schemas.filters.DeploymentFilterId(any_=deployment_ids),
    )
    # only SCHEDULED runs due before the cutoff
    flow_run_filter = schemas.filters.FlowRunFilter(
        next_scheduled_start_time=schemas.filters.FlowRunFilterNextScheduledStartTime(
            before_=scheduled_before
        ),
        state=schemas.filters.FlowRunFilterState(
            type=schemas.filters.FlowRunFilterStateType(
                any_=[schemas.states.StateType.SCHEDULED]
            )
        ),
    )

    async with db.session_context() as session:
        scheduled_runs = await models.flow_runs.read_flow_runs(
            session=session,
            limit=limit,
            deployment_filter=deployment_filter,
            flow_run_filter=flow_run_filter,
            sort=schemas.sorting.FlowRunSort.NEXT_SCHEDULED_START_TIME_ASC,
        )

        responses = [
            schemas.responses.FlowRunResponse.model_validate(
                run, from_attributes=True
            )
            for run in scheduled_runs
        ]

    # record that these deployments were polled, outside the request path
    background_tasks.add_task(
        mark_deployments_ready,
        deployment_ids=deployment_ids,
    )

    return responses
520
+
521
+
522
@router.post("/count")
async def count_deployments(
    flows: schemas.filters.FlowFilter = None,
    flow_runs: schemas.filters.FlowRunFilter = None,
    task_runs: schemas.filters.TaskRunFilter = None,
    deployments: schemas.filters.DeploymentFilter = None,
    work_pools: schemas.filters.WorkPoolFilter = None,
    work_pool_queues: schemas.filters.WorkQueueFilter = None,
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> int:
    """
    Return the number of deployments matching the supplied filters.
    """
    async with db.session_context() as session:
        total = await models.deployments.count_deployments(
            session=session,
            flow_filter=flows,
            flow_run_filter=flow_runs,
            task_run_filter=task_runs,
            deployment_filter=deployments,
            work_pool_filter=work_pools,
            work_queue_filter=work_pool_queues,
        )
        return total
545
+
546
+
547
@router.delete("/{id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_deployment(
    deployment_id: UUID = Path(..., description="The deployment id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> None:
    """
    Delete the deployment with the given id; respond with 404 if no matching
    deployment exists.
    """
    async with db.session_context(begin_transaction=True) as session:
        deleted = await models.deployments.delete_deployment(
            session=session, deployment_id=deployment_id
        )
        if not deleted:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="Deployment not found"
            )
563
+
564
+
565
@router.post("/{id}/schedule")
async def schedule_deployment(
    deployment_id: UUID = Path(..., description="The deployment id", alias="id"),
    start_time: DateTime = Body(None, description="The earliest date to schedule"),
    end_time: DateTime = Body(None, description="The latest date to schedule"),
    # Workaround for the fact that FastAPI does not let us configure ser_json_timedelta
    # to represent timedeltas as floats in JSON.
    min_time: float = Body(
        None,
        description=(
            "Runs will be scheduled until at least this long after the `start_time`"
        ),
        json_schema_extra={"format": "time-delta"},
    ),
    min_runs: int = Body(None, description="The minimum number of runs to schedule"),
    max_runs: int = Body(None, description="The maximum number of runs to schedule"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> None:
    """
    Schedule runs for a deployment. For backfills, provide start/end times in the past.

    This function will generate the minimum number of runs that satisfy the min
    and max times, and the min and max counts. Specifically, the following order
    will be respected.

    - Runs will be generated starting on or after the `start_time`
    - No more than `max_runs` runs will be generated
    - No runs will be generated after `end_time` is reached
    - At least `min_runs` runs will be generated
    - Runs will be generated until at least `start_time + min_time` is reached
    """
    # `min_time` arrives over the wire as a float of seconds (see workaround
    # note above); convert it to the timedelta the model layer expects.
    if isinstance(min_time, float):
        min_time = datetime.timedelta(seconds=min_time)

    async with db.session_context(begin_transaction=True) as session:
        await models.deployments.schedule_runs(
            session=session,
            deployment_id=deployment_id,
            start_time=start_time,
            end_time=end_time,
            min_time=min_time,
            min_runs=min_runs,
            max_runs=max_runs,
        )
609
+
610
+
611
@router.post("/{id:uuid}/resume_deployment")
async def resume_deployment(
    deployment_id: UUID = Path(..., description="The deployment id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> None:
    """
    Set a deployment schedule to active. Runs will be scheduled immediately.
    """
    async with db.session_context(begin_transaction=True) as session:
        orm_deployment = await models.deployments.read_deployment(
            session=session, deployment_id=deployment_id
        )
        if orm_deployment is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="Deployment not found"
            )
        # flipping the flag on the ORM model is persisted when the
        # transaction commits on context exit
        orm_deployment.paused = False
628
+
629
+
630
@router.post("/{id:uuid}/pause_deployment")
async def pause_deployment(
    deployment_id: UUID = Path(..., description="The deployment id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> None:
    """
    Set a deployment schedule to inactive. Any auto-scheduled runs still in a Scheduled
    state will be deleted.
    """
    # transactions are managed manually (begin_transaction=False) so the pause
    # can be committed before the scheduled runs are removed
    async with db.session_context(begin_transaction=False) as session:
        orm_deployment = await models.deployments.read_deployment(
            session=session, deployment_id=deployment_id
        )
        if orm_deployment is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="Deployment not found"
            )
        orm_deployment.paused = True

        # commit here to make the inactive schedule "visible" to the scheduler service
        await session.commit()

        # delete any auto scheduled runs
        await models.deployments._delete_scheduled_runs(
            session=session,
            deployment_id=deployment_id,
            auto_scheduled_only=True,
        )

        await session.commit()
660
+
661
+
662
@router.post("/{id}/create_flow_run")
async def create_flow_run_from_deployment(
    flow_run: schemas.actions.DeploymentFlowRunCreate,
    deployment_id: UUID = Path(..., description="The deployment id", alias="id"),
    created_by: Optional[schemas.core.CreatedBy] = Depends(dependencies.get_created_by),
    db: PrefectDBInterface = Depends(provide_database_interface),
    worker_lookups: WorkerLookups = Depends(WorkerLookups),
    response: Response = None,
) -> schemas.responses.FlowRunResponse:
    """
    Create a flow run from a deployment.

    Any parameters not provided will be inferred from the deployment's parameters.
    If tags are not provided, the deployment's tags will be used.

    If no state is provided, the flow run will be created in a SCHEDULED state.

    Raises:
        404: if the deployment does not exist
        400: if parameter hydration fails
        409: if schema enforcement is enabled and parameters are invalid,
             or job variables are invalid
        422: if the parameter schema contains circular references
    """
    async with db.session_context(begin_transaction=True) as session:
        # get relevant info from the deployment
        deployment = await models.deployments.read_deployment(
            session=session, deployment_id=deployment_id
        )

        if not deployment:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="Deployment not found"
            )

        try:
            # BUG FIX: merge into a fresh dict instead of calling `.update()`
            # on `deployment.parameters`, which mutated the ORM model's
            # parameters in place inside an open transaction.
            dehydrated_params = {
                **deployment.parameters,
                **(flow_run.parameters or {}),
            }
            ctx = await HydrationContext.build(
                session=session,
                raise_on_error=True,
                render_jinja=True,
                render_workspace_variables=True,
            )
            parameters = hydrate(dehydrated_params, ctx)
        except HydrationError as exc:
            raise HTTPException(
                status.HTTP_400_BAD_REQUEST,
                detail=f"Error hydrating flow run parameters: {exc}",
            )

        # default
        enforce_parameter_schema = deployment.enforce_parameter_schema

        # run override
        if flow_run.enforce_parameter_schema is not None:
            enforce_parameter_schema = flow_run.enforce_parameter_schema

        if enforce_parameter_schema:
            if not isinstance(deployment.parameter_openapi_schema, dict):
                raise HTTPException(
                    status.HTTP_409_CONFLICT,
                    detail=(
                        "Error updating deployment: Cannot update parameters because"
                        " parameter schema enforcement is enabled and the deployment"
                        " does not have a valid parameter schema."
                    ),
                )
            try:
                validate(
                    parameters, deployment.parameter_openapi_schema, raise_on_error=True
                )
            except ValidationError as exc:
                raise HTTPException(
                    status.HTTP_409_CONFLICT,
                    detail=f"Error creating flow run: {exc}",
                )
            except CircularSchemaRefError:
                raise HTTPException(
                    status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                    detail="Invalid schema: Unable to validate schema with circular references.",
                )

        await validate_job_variables_for_deployment_flow_run(
            session, deployment, flow_run
        )

        work_queue_name = deployment.work_queue_name
        work_queue_id = deployment.work_queue_id

        if flow_run.work_queue_name:
            # can't mutate the ORM model or else it will commit the changes back
            work_queue_id = await worker_lookups._get_work_queue_id_from_name(
                session=session,
                work_pool_name=deployment.work_queue.work_pool.name,
                work_queue_name=flow_run.work_queue_name,
                create_queue_if_not_found=True,
            )
            work_queue_name = flow_run.work_queue_name

        # hydrate the input model into a full flow run / state model
        flow_run = schemas.core.FlowRun(
            **flow_run.model_dump(
                exclude={
                    "parameters",
                    "tags",
                    "infrastructure_document_id",
                    "work_queue_name",
                    "enforce_parameter_schema",
                }
            ),
            flow_id=deployment.flow_id,
            deployment_id=deployment.id,
            deployment_version=deployment.version,
            parameters=parameters,
            tags=set(deployment.tags).union(flow_run.tags),
            infrastructure_document_id=(
                flow_run.infrastructure_document_id
                or deployment.infrastructure_document_id
            ),
            work_queue_name=work_queue_name,
            work_queue_id=work_queue_id,
            created_by=created_by,
        )

        if not flow_run.state:
            flow_run.state = schemas.states.Scheduled()

        right_now = now("UTC")
        model = await models.flow_runs.create_flow_run(
            session=session, flow_run=flow_run
        )
        # a `created` timestamp at-or-after the request time means a new run
        # was created (vs. an idempotent re-read of an existing one)
        if model.created >= right_now:
            # NOTE(review): `response` defaults to None in the signature;
            # FastAPI is presumably injecting a Response here — confirm.
            response.status_code = status.HTTP_201_CREATED
        return schemas.responses.FlowRunResponse.model_validate(
            model, from_attributes=True
        )
792
+
793
+
794
# DEPRECATED
@router.get("/{id}/work_queue_check", deprecated=True)
async def work_queue_check_for_deployment(
    deployment_id: UUID = Path(..., description="The deployment id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> List[schemas.core.WorkQueue]:
    """
    Get list of work-queues that are able to pick up the specified deployment.

    This endpoint is intended to be used by the UI to provide users warnings
    about deployments that are unable to be executed because there are no work
    queues that will pick up their runs, based on existing filter criteria. It
    may be deprecated in the future because there is not a strict relationship
    between work queues and deployments.
    """
    # The model-layer lookup raises ObjectNotFoundError for an unknown
    # deployment id; translate that into a 404 for the API consumer.
    try:
        async with db.session_context() as session:
            eligible_queues = await models.deployments.check_work_queues_for_deployment(
                session=session, deployment_id=deployment_id
            )
    except ObjectNotFoundError:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND, detail="Deployment not found"
        )
    return eligible_queues
819
+
820
+
821
@router.get("/{id}/schedules")
async def read_deployment_schedules(
    deployment_id: UUID = Path(..., description="The deployment id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> List[schemas.core.DeploymentSchedule]:
    # Read all schedules attached to a deployment; 404 if the deployment
    # does not exist.
    async with db.session_context() as session:
        deployment = await models.deployments.read_deployment(
            session=session, deployment_id=deployment_id
        )

        # Guard clause: fail fast before querying schedules.
        if deployment is None:
            raise HTTPException(
                status.HTTP_404_NOT_FOUND, detail="Deployment not found."
            )

        schedules = await models.deployments.read_deployment_schedules(
            session=session,
            deployment_id=deployment.id,
        )
        return schedules
840
+
841
+
842
@router.post("/{id}/schedules", status_code=status.HTTP_201_CREATED)
async def create_deployment_schedules(
    deployment_id: UUID = Path(..., description="The deployment id", alias="id"),
    schedules: List[schemas.actions.DeploymentScheduleCreate] = Body(
        default=..., description="The schedules to create"
    ),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> List[schemas.core.DeploymentSchedule]:
    """
    Create schedules for a deployment.

    Returns the created schedules with a 201. Raises a 404 if the deployment
    does not exist and a 409 if any schedule slug collides with an existing
    schedule on the same deployment.
    """
    async with db.session_context(begin_transaction=True) as session:
        deployment = await models.deployments.read_deployment(
            session=session, deployment_id=deployment_id
        )

        if not deployment:
            raise HTTPException(
                status.HTTP_404_NOT_FOUND, detail="Deployment not found."
            )

        try:
            created = await models.deployments.create_deployment_schedules(
                session=session,
                deployment_id=deployment.id,
                schedules=schedules,
            )
        except sa.exc.IntegrityError as e:
            # Translate unique-constraint violations (duplicate schedule
            # slugs) into a 409. The violation message is dialect-specific:
            # PostgreSQL reports "duplicate key value violates unique
            # constraint" while SQLite reports "UNIQUE constraint failed".
            # The previous check matched only the PostgreSQL wording, so
            # SQLite-backed servers surfaced a 500 for duplicate slugs.
            message = str(e)
            if (
                "duplicate key value violates unique constraint" in message
                or "UNIQUE constraint failed" in message
            ):
                raise HTTPException(
                    status.HTTP_409_CONFLICT,
                    detail="Schedule slugs must be unique within a deployment.",
                )
            # Any other integrity error is unexpected; let it propagate.
            raise
        return created
874
+
875
+
876
@router.patch("/{id}/schedules/{schedule_id}", status_code=status.HTTP_204_NO_CONTENT)
async def update_deployment_schedule(
    deployment_id: UUID = Path(..., description="The deployment id", alias="id"),
    schedule_id: UUID = Path(..., description="The schedule id", alias="schedule_id"),
    schedule: schemas.actions.DeploymentScheduleUpdate = Body(
        default=..., description="The updated schedule"
    ),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> None:
    # Apply a partial update to one schedule of a deployment. 404s when
    # either the deployment or the schedule cannot be found.
    async with db.session_context(begin_transaction=True) as session:
        parent_deployment = await models.deployments.read_deployment(
            session=session, deployment_id=deployment_id
        )
        if parent_deployment is None:
            raise HTTPException(
                status.HTTP_404_NOT_FOUND, detail="Deployment not found."
            )

        did_update = await models.deployments.update_deployment_schedule(
            session=session,
            deployment_id=deployment_id,
            deployment_schedule_id=schedule_id,
            schedule=schedule,
        )
        if not did_update:
            raise HTTPException(status.HTTP_404_NOT_FOUND, detail="Schedule not found.")

        # The schedule changed, so previously auto-scheduled runs may no
        # longer match it; remove them so the scheduler can recreate them.
        await models.deployments._delete_scheduled_runs(
            session=session,
            deployment_id=deployment_id,
            auto_scheduled_only=True,
        )
910
+
911
+
912
@router.delete("/{id}/schedules/{schedule_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_deployment_schedule(
    deployment_id: UUID = Path(..., description="The deployment id", alias="id"),
    schedule_id: UUID = Path(..., description="The schedule id", alias="schedule_id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> None:
    # Remove one schedule from a deployment. 404s when either the
    # deployment or the schedule cannot be found.
    async with db.session_context(begin_transaction=True) as session:
        parent_deployment = await models.deployments.read_deployment(
            session=session, deployment_id=deployment_id
        )
        if parent_deployment is None:
            raise HTTPException(
                status.HTTP_404_NOT_FOUND, detail="Deployment not found."
            )

        did_delete = await models.deployments.delete_deployment_schedule(
            session=session,
            deployment_id=deployment_id,
            deployment_schedule_id=schedule_id,
        )
        if not did_delete:
            raise HTTPException(status.HTTP_404_NOT_FOUND, detail="Schedule not found.")

        # Drop any runs the deleted schedule had auto-scheduled; manually
        # created runs are left untouched.
        await models.deployments._delete_scheduled_runs(
            session=session,
            deployment_id=deployment_id,
            auto_scheduled_only=True,
        )