prefect-client 3.2.2__py3-none-any.whl → 3.2.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. prefect/__init__.py +15 -8
  2. prefect/_build_info.py +5 -0
  3. prefect/client/orchestration/__init__.py +16 -5
  4. prefect/main.py +0 -2
  5. prefect/server/api/__init__.py +34 -0
  6. prefect/server/api/admin.py +85 -0
  7. prefect/server/api/artifacts.py +224 -0
  8. prefect/server/api/automations.py +239 -0
  9. prefect/server/api/block_capabilities.py +25 -0
  10. prefect/server/api/block_documents.py +164 -0
  11. prefect/server/api/block_schemas.py +153 -0
  12. prefect/server/api/block_types.py +211 -0
  13. prefect/server/api/clients.py +246 -0
  14. prefect/server/api/collections.py +75 -0
  15. prefect/server/api/concurrency_limits.py +286 -0
  16. prefect/server/api/concurrency_limits_v2.py +269 -0
  17. prefect/server/api/csrf_token.py +38 -0
  18. prefect/server/api/dependencies.py +196 -0
  19. prefect/server/api/deployments.py +941 -0
  20. prefect/server/api/events.py +300 -0
  21. prefect/server/api/flow_run_notification_policies.py +120 -0
  22. prefect/server/api/flow_run_states.py +52 -0
  23. prefect/server/api/flow_runs.py +867 -0
  24. prefect/server/api/flows.py +210 -0
  25. prefect/server/api/logs.py +43 -0
  26. prefect/server/api/middleware.py +73 -0
  27. prefect/server/api/root.py +35 -0
  28. prefect/server/api/run_history.py +170 -0
  29. prefect/server/api/saved_searches.py +99 -0
  30. prefect/server/api/server.py +891 -0
  31. prefect/server/api/task_run_states.py +52 -0
  32. prefect/server/api/task_runs.py +342 -0
  33. prefect/server/api/task_workers.py +31 -0
  34. prefect/server/api/templates.py +35 -0
  35. prefect/server/api/ui/__init__.py +3 -0
  36. prefect/server/api/ui/flow_runs.py +128 -0
  37. prefect/server/api/ui/flows.py +173 -0
  38. prefect/server/api/ui/schemas.py +63 -0
  39. prefect/server/api/ui/task_runs.py +175 -0
  40. prefect/server/api/validation.py +382 -0
  41. prefect/server/api/variables.py +181 -0
  42. prefect/server/api/work_queues.py +230 -0
  43. prefect/server/api/workers.py +656 -0
  44. prefect/settings/sources.py +18 -5
  45. {prefect_client-3.2.2.dist-info → prefect_client-3.2.4.dist-info}/METADATA +10 -15
  46. {prefect_client-3.2.2.dist-info → prefect_client-3.2.4.dist-info}/RECORD +48 -10
  47. {prefect_client-3.2.2.dist-info → prefect_client-3.2.4.dist-info}/WHEEL +1 -2
  48. prefect/_version.py +0 -21
  49. prefect_client-3.2.2.dist-info/top_level.txt +0 -1
  50. {prefect_client-3.2.2.dist-info → prefect_client-3.2.4.dist-info/licenses}/LICENSE +0 -0
@@ -0,0 +1,867 @@
1
+ """
2
+ Routes for interacting with flow run objects.
3
+ """
4
+
5
+ import csv
6
+ import datetime
7
+ import io
8
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional
9
+ from uuid import UUID
10
+
11
+ import orjson
12
+ import sqlalchemy as sa
13
+ from fastapi import (
14
+ Body,
15
+ Depends,
16
+ HTTPException,
17
+ Path,
18
+ Query,
19
+ Response,
20
+ status,
21
+ )
22
+ from fastapi.encoders import jsonable_encoder
23
+ from fastapi.responses import ORJSONResponse, PlainTextResponse, StreamingResponse
24
+ from sqlalchemy.exc import IntegrityError
25
+
26
+ import prefect.server.api.dependencies as dependencies
27
+ import prefect.server.models as models
28
+ import prefect.server.schemas as schemas
29
+ from prefect.logging import get_logger
30
+ from prefect.server.api.run_history import run_history
31
+ from prefect.server.api.validation import validate_job_variables_for_deployment_flow_run
32
+ from prefect.server.database import PrefectDBInterface, provide_database_interface
33
+ from prefect.server.exceptions import FlowRunGraphTooLarge
34
+ from prefect.server.models.flow_runs import (
35
+ DependencyResult,
36
+ read_flow_run_graph,
37
+ )
38
+ from prefect.server.orchestration import dependencies as orchestration_dependencies
39
+ from prefect.server.orchestration.policies import (
40
+ FlowRunOrchestrationPolicy,
41
+ TaskRunOrchestrationPolicy,
42
+ )
43
+ from prefect.server.schemas.graph import Graph
44
+ from prefect.server.schemas.responses import (
45
+ FlowRunPaginationResponse,
46
+ OrchestrationResult,
47
+ )
48
+ from prefect.server.utilities.server import PrefectRouter
49
+ from prefect.types import DateTime
50
+ from prefect.types._datetime import now
51
+ from prefect.utilities import schema_tools
52
+
53
+ if TYPE_CHECKING:
54
+ import logging
55
+
56
+ logger: "logging.Logger" = get_logger("server.api")
57
+
58
+ router: PrefectRouter = PrefectRouter(prefix="/flow_runs", tags=["Flow Runs"])
59
+
60
+
61
@router.post("/")
async def create_flow_run(
    flow_run: schemas.actions.FlowRunCreate,
    db: PrefectDBInterface = Depends(provide_database_interface),
    response: Response = None,  # type: ignore
    created_by: Optional[schemas.core.CreatedBy] = Depends(dependencies.get_created_by),
    orchestration_parameters: Dict[str, Any] = Depends(
        orchestration_dependencies.provide_flow_orchestration_parameters
    ),
    api_version: str = Depends(dependencies.provide_request_api_version),
) -> schemas.responses.FlowRunResponse:
    """
    Create a flow run. If a flow run with the same flow_id and
    idempotency key already exists, the existing flow run will be returned.

    If no state is provided, the flow run will be created in a PENDING state.

    Returns the created (or existing, on idempotency-key match) flow run;
    the response status is 201 when a new row was created, 200 otherwise.
    """
    # hydrate the input model into a full flow run / state model
    flow_run_object = schemas.core.FlowRun(
        **flow_run.model_dump(), created_by=created_by
    )

    # pass the request version to the orchestration engine to support compatibility code
    orchestration_parameters.update({"api-version": api_version})

    if not flow_run_object.state:
        flow_run_object.state = schemas.states.Pending()

    # Capture the time before the insert: if the returned model's `created`
    # timestamp is at or after this instant, the row was newly created rather
    # than returned from an idempotency-key match, so we signal 201.
    right_now = now("UTC")

    async with db.session_context(begin_transaction=True) as session:
        model = await models.flow_runs.create_flow_run(
            session=session,
            flow_run=flow_run_object,
            orchestration_parameters=orchestration_parameters,
        )
        if model.created >= right_now:
            response.status_code = status.HTTP_201_CREATED

    return schemas.responses.FlowRunResponse.model_validate(
        model, from_attributes=True
    )
103
+
104
+
105
@router.patch("/{id}", status_code=status.HTTP_204_NO_CONTENT)
async def update_flow_run(
    flow_run: schemas.actions.FlowRunUpdate,
    flow_run_id: UUID = Path(..., description="The flow run id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> None:
    """
    Updates a flow run.

    When the update includes job variables, additional validation is applied
    first: the run must exist, must be in a SCHEDULED state, and must belong
    to a deployment whose base job template the variables are validated
    against.

    Raises:
        HTTPException(404): if the flow run does not exist.
        HTTPException(400): if job variables are supplied but the run is not
            SCHEDULED or has no associated deployment.
    """
    async with db.session_context(begin_transaction=True) as session:
        # Job variables can only be changed before the run starts, because
        # workers read them when submitting the run's infrastructure.
        if flow_run.job_variables is not None:
            this_run = await models.flow_runs.read_flow_run(
                session, flow_run_id=flow_run_id
            )
            if this_run is None:
                raise HTTPException(
                    status.HTTP_404_NOT_FOUND, detail="Flow run not found"
                )
            if not this_run.state:
                raise HTTPException(
                    status.HTTP_400_BAD_REQUEST,
                    detail="Flow run state is required to update job variables but none exists",
                )
            if this_run.state.type != schemas.states.StateType.SCHEDULED:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail=f"Job variables for a flow run in state {this_run.state.type.name} cannot be updated",
                )
            if this_run.deployment_id is None:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail="A deployment for the flow run could not be found",
                )

            deployment = await models.deployments.read_deployment(
                session=session, deployment_id=this_run.deployment_id
            )
            if deployment is None:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail="A deployment for the flow run could not be found",
                )

            # Validates the supplied variables against the deployment's
            # job template; raises on invalid input.
            await validate_job_variables_for_deployment_flow_run(
                session, deployment, flow_run
            )

        result = await models.flow_runs.update_flow_run(
            session=session, flow_run=flow_run, flow_run_id=flow_run_id
        )
        if not result:
            raise HTTPException(status.HTTP_404_NOT_FOUND, detail="Flow run not found")
157
+
158
+
159
@router.post("/count")
async def count_flow_runs(
    flows: schemas.filters.FlowFilter = None,
    flow_runs: schemas.filters.FlowRunFilter = None,
    task_runs: schemas.filters.TaskRunFilter = None,
    deployments: schemas.filters.DeploymentFilter = None,
    work_pools: schemas.filters.WorkPoolFilter = None,
    work_pool_queues: schemas.filters.WorkQueueFilter = None,
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> int:
    """
    Return the number of flow runs matching the provided filters.
    """
    # Collect the request filters into the keyword form the model layer expects.
    filter_kwargs = dict(
        flow_filter=flows,
        flow_run_filter=flow_runs,
        task_run_filter=task_runs,
        deployment_filter=deployments,
        work_pool_filter=work_pools,
        work_queue_filter=work_pool_queues,
    )
    async with db.session_context() as sess:
        return await models.flow_runs.count_flow_runs(session=sess, **filter_kwargs)
182
+
183
+
184
@router.post("/lateness")
async def average_flow_run_lateness(
    flows: Optional[schemas.filters.FlowFilter] = None,
    flow_runs: Optional[schemas.filters.FlowRunFilter] = None,
    task_runs: Optional[schemas.filters.TaskRunFilter] = None,
    deployments: Optional[schemas.filters.DeploymentFilter] = None,
    work_pools: Optional[schemas.filters.WorkPoolFilter] = None,
    work_pool_queues: Optional[schemas.filters.WorkQueueFilter] = None,
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> Optional[float]:
    """
    Query for average flow-run lateness in seconds.

    Returns None when no flow runs match the filters (the SQL AVG of an
    empty set is NULL).
    """
    async with db.session_context() as session:
        if db.dialect.name == "sqlite":
            # Since we want an _average_ of the lateness we're unable to use
            # the existing FlowRun.expected_start_time_delta property as it
            # returns a timedelta and SQLite is unable to properly deal with it
            # and always returns 1970.0 as the average. This copies the same
            # logic but ensures that it returns the number of seconds instead
            # so it's compatible with SQLite.
            base_query = sa.case(
                # Run already started late: lateness is start - expected start.
                (
                    db.FlowRun.start_time > db.FlowRun.expected_start_time,
                    sa.func.strftime("%s", db.FlowRun.start_time)
                    - sa.func.strftime("%s", db.FlowRun.expected_start_time),
                ),
                # Run has not started, is not in a terminal state, and is
                # already past due: lateness accrues up to "now".
                (
                    db.FlowRun.start_time.is_(None)
                    & db.FlowRun.state_type.notin_(schemas.states.TERMINAL_STATES)
                    & (db.FlowRun.expected_start_time < sa.func.datetime("now")),
                    sa.func.strftime("%s", sa.func.datetime("now"))
                    - sa.func.strftime("%s", db.FlowRun.expected_start_time),
                ),
                # On-time or early runs contribute zero lateness.
                else_=0,
            )
        else:
            base_query = db.FlowRun.estimated_start_time_delta

        query = await models.flow_runs._apply_flow_run_filters(
            db,
            sa.select(sa.func.avg(base_query)),
            flow_filter=flows,
            flow_run_filter=flow_runs,
            task_run_filter=task_runs,
            deployment_filter=deployments,
            work_pool_filter=work_pools,
            work_queue_filter=work_pool_queues,
        )
        result = await session.execute(query)

        avg_lateness = result.scalar()

        if avg_lateness is None:
            return None
        elif isinstance(avg_lateness, datetime.timedelta):
            # Postgres returns an interval; normalize to float seconds.
            return avg_lateness.total_seconds()
        else:
            # SQLite branch already computed seconds as a number.
            return avg_lateness
243
+
244
+
245
@router.post("/history")
async def flow_run_history(
    history_start: DateTime = Body(..., description="The history's start time."),
    history_end: DateTime = Body(..., description="The history's end time."),
    # Workaround for the fact that FastAPI does not let us configure ser_json_timedelta
    # to represent timedeltas as floats in JSON.
    history_interval: float = Body(
        ...,
        description=(
            "The size of each history interval, in seconds. Must be at least 1 second."
        ),
        json_schema_extra={"format": "time-delta"},
        alias="history_interval_seconds",
    ),
    flows: schemas.filters.FlowFilter = None,
    flow_runs: schemas.filters.FlowRunFilter = None,
    task_runs: schemas.filters.TaskRunFilter = None,
    deployments: schemas.filters.DeploymentFilter = None,
    work_pools: schemas.filters.WorkPoolFilter = None,
    work_queues: schemas.filters.WorkQueueFilter = None,
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> List[schemas.responses.HistoryResponse]:
    """
    Query for flow run history data across a given range and interval.

    Raises:
        HTTPException(422): if the interval is shorter than one second.
    """
    # The interval arrives as raw seconds (see the Body workaround above);
    # convert it to the timedelta that run_history expects.
    if isinstance(history_interval, float):
        history_interval = datetime.timedelta(seconds=history_interval)

    if history_interval < datetime.timedelta(seconds=1):
        raise HTTPException(
            status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail="History interval must not be less than 1 second.",
        )

    async with db.session_context() as session:
        return await run_history(
            session=session,
            run_type="flow_run",
            history_start=history_start,
            history_end=history_end,
            history_interval=history_interval,
            flows=flows,
            flow_runs=flow_runs,
            task_runs=task_runs,
            deployments=deployments,
            work_pools=work_pools,
            work_queues=work_queues,
        )
293
+
294
+
295
@router.get("/{id}")
async def read_flow_run(
    flow_run_id: UUID = Path(..., description="The flow run id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> schemas.responses.FlowRunResponse:
    """
    Retrieve a single flow run by its id.

    Raises:
        HTTPException(404): if no flow run with the given id exists.
    """
    async with db.session_context() as sess:
        run = await models.flow_runs.read_flow_run(
            session=sess, flow_run_id=flow_run_id
        )
        if not run:
            raise HTTPException(status.HTTP_404_NOT_FOUND, detail="Flow run not found")
        # Validate from ORM attributes while the session is still open.
        return schemas.responses.FlowRunResponse.model_validate(
            run, from_attributes=True
        )
312
+
313
+
314
@router.get("/{id}/graph")
async def read_flow_run_graph_v1(
    flow_run_id: UUID = Path(..., description="The flow run id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> List[DependencyResult]:
    """
    Return the task-run dependency map for the given flow run.
    """
    async with db.session_context() as sess:
        dependency_map = await models.flow_runs.read_task_run_dependencies(
            session=sess, flow_run_id=flow_run_id
        )
        return dependency_map
326
+
327
+
328
@router.get("/{id:uuid}/graph-v2")
async def read_flow_run_graph_v2(
    flow_run_id: UUID = Path(..., description="The flow run id", alias="id"),
    since: DateTime = Query(
        default=jsonable_encoder(DateTime.min),
        description="Only include runs that start or end after this time.",
    ),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> Graph:
    """
    Get a graph of the tasks and subflow runs for the given flow run

    Raises:
        HTTPException(400): if the graph exceeds the configured size limit.
    """
    async with db.session_context() as sess:
        try:
            graph = await read_flow_run_graph(
                session=sess, flow_run_id=flow_run_id, since=since
            )
        except FlowRunGraphTooLarge as exc:
            # Surface the size-limit error as a client error rather than a 500.
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)
            )
        return graph
352
+
353
+
354
@router.post("/{id}/resume")
async def resume_flow_run(
    flow_run_id: UUID = Path(..., description="The flow run id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
    run_input: Optional[dict[str, Any]] = Body(default=None, embed=True),
    response: Response = None,
    flow_policy: type[FlowRunOrchestrationPolicy] = Depends(
        orchestration_dependencies.provide_flow_policy
    ),
    task_policy: type[TaskRunOrchestrationPolicy] = Depends(
        orchestration_dependencies.provide_task_policy
    ),
    orchestration_parameters: Dict[str, Any] = Depends(
        orchestration_dependencies.provide_flow_orchestration_parameters
    ),
    api_version: str = Depends(dependencies.provide_request_api_version),
) -> OrchestrationResult:
    """
    Resume a paused flow run.

    If the paused state recorded a run-input keyset, the provided `run_input`
    is hydrated and validated against the stored schema before the state
    transition; validation failures are returned as REJECT results rather
    than HTTP errors.

    Raises:
        HTTPException(404): if the flow run does not exist.
    """
    # Capture the time before orchestration so we can report 201 only when a
    # new state was actually created.
    right_now = now("UTC")

    async with db.session_context(begin_transaction=True) as session:
        flow_run = await models.flow_runs.read_flow_run(session, flow_run_id)
        # BUGFIX: previously a missing flow run fell through to
        # `flow_run.state` and raised AttributeError (an HTTP 500);
        # return a proper 404 instead.
        if flow_run is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="Flow run not found"
            )
        state = flow_run.state

        if state is None or state.type != schemas.states.StateType.PAUSED:
            result = OrchestrationResult(
                state=None,
                status=schemas.responses.SetStateStatus.ABORT,
                details=schemas.responses.StateAbortDetails(
                    reason="Cannot resume a flow run that is not paused."
                ),
            )
            return result

        # pass the request version to the orchestration engine to support
        # compatibility code
        orchestration_parameters.update({"api-version": api_version})

        keyset = state.state_details.run_input_keyset

        if keyset:
            run_input = run_input or {}

            try:
                # Resolve jinja templates and workspace variables in the
                # caller-provided input before validating it.
                hydration_context = await schema_tools.HydrationContext.build(
                    session=session,
                    raise_on_error=True,
                    render_jinja=True,
                    render_workspace_variables=True,
                )
                run_input = schema_tools.hydrate(run_input, hydration_context) or {}
            except schema_tools.HydrationError as exc:
                # NOTE(review): REJECT results reuse StateAbortDetails here
                # and below — presumably intentional; confirm against the
                # responses schema.
                return OrchestrationResult(
                    state=state,
                    status=schemas.responses.SetStateStatus.REJECT,
                    details=schemas.responses.StateAbortDetails(
                        reason=f"Error hydrating run input: {exc}",
                    ),
                )

            schema_json = await models.flow_run_input.read_flow_run_input(
                session=session, flow_run_id=flow_run.id, key=keyset["schema"]
            )

            if schema_json is None:
                return OrchestrationResult(
                    state=state,
                    status=schemas.responses.SetStateStatus.REJECT,
                    details=schemas.responses.StateAbortDetails(
                        reason="Run input schema not found."
                    ),
                )

            try:
                schema = orjson.loads(schema_json.value)
            except orjson.JSONDecodeError:
                return OrchestrationResult(
                    state=state,
                    status=schemas.responses.SetStateStatus.REJECT,
                    details=schemas.responses.StateAbortDetails(
                        reason="Run input schema is not valid JSON."
                    ),
                )

            try:
                schema_tools.validate(run_input, schema, raise_on_error=True)
            except schema_tools.ValidationError as exc:
                return OrchestrationResult(
                    state=state,
                    status=schemas.responses.SetStateStatus.REJECT,
                    details=schemas.responses.StateAbortDetails(
                        reason=f"Reason: {exc}"
                    ),
                )
            except schema_tools.CircularSchemaRefError:
                return OrchestrationResult(
                    state=state,
                    status=schemas.responses.SetStateStatus.REJECT,
                    details=schemas.responses.StateAbortDetails(
                        reason="Invalid schema: Unable to validate schema with circular references.",
                    ),
                )

        if state.state_details.pause_reschedule:
            # Reschedulable pauses return the run to the scheduler rather
            # than resuming in place.
            orchestration_result = await models.flow_runs.set_flow_run_state(
                session=session,
                flow_run_id=flow_run_id,
                state=schemas.states.Scheduled(
                    name="Resuming", scheduled_time=now("UTC")
                ),
                flow_policy=flow_policy,
                orchestration_parameters=orchestration_parameters,
            )
        else:
            orchestration_result = await models.flow_runs.set_flow_run_state(
                session=session,
                flow_run_id=flow_run_id,
                state=schemas.states.Running(),
                flow_policy=flow_policy,
                orchestration_parameters=orchestration_parameters,
            )

        if (
            keyset
            and run_input
            and orchestration_result.status == schemas.responses.SetStateStatus.ACCEPT
        ):
            # The state change is accepted, go ahead and store the validated
            # run input.
            await models.flow_run_input.create_flow_run_input(
                session=session,
                flow_run_input=schemas.core.FlowRunInput(
                    flow_run_id=flow_run_id,
                    key=keyset["response"],
                    value=orjson.dumps(run_input).decode("utf-8"),
                ),
            )

        # set the 201 if a new state was created
        if (
            orchestration_result.state
            and orchestration_result.state.timestamp >= right_now
        ):
            response.status_code = status.HTTP_201_CREATED
        else:
            response.status_code = status.HTTP_200_OK

        return orchestration_result
502
+
503
+
504
@router.post("/filter", response_class=ORJSONResponse)
async def read_flow_runs(
    sort: schemas.sorting.FlowRunSort = Body(schemas.sorting.FlowRunSort.ID_DESC),
    limit: int = dependencies.LimitBody(),
    offset: int = Body(0, ge=0),
    flows: Optional[schemas.filters.FlowFilter] = None,
    flow_runs: Optional[schemas.filters.FlowRunFilter] = None,
    task_runs: Optional[schemas.filters.TaskRunFilter] = None,
    deployments: Optional[schemas.filters.DeploymentFilter] = None,
    work_pools: Optional[schemas.filters.WorkPoolFilter] = None,
    work_pool_queues: Optional[schemas.filters.WorkQueueFilter] = None,
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> List[schemas.responses.FlowRunResponse]:
    """
    Query for flow runs.
    """
    async with db.session_context() as sess:
        matching_runs = await models.flow_runs.read_flow_runs(
            session=sess,
            flow_filter=flows,
            flow_run_filter=flow_runs,
            task_run_filter=task_runs,
            deployment_filter=deployments,
            work_pool_filter=work_pools,
            work_queue_filter=work_pool_queues,
            offset=offset,
            limit=limit,
            sort=sort,
        )

        # Serialize with orjson ourselves instead of letting FastAPI's
        # jsonable_encoder walk the objects; the latter is very slow for
        # large, nested responses.
        # See: https://github.com/tiangolo/fastapi/issues/1224
        payload = []
        for run in matching_runs:
            validated = schemas.responses.FlowRunResponse.model_validate(
                run, from_attributes=True
            )
            payload.append(validated.model_dump(mode="json"))
        return ORJSONResponse(content=payload)
545
+
546
+
547
@router.delete("/{id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_flow_run(
    flow_run_id: UUID = Path(..., description="The flow run id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> None:
    """
    Delete a flow run by id.

    Raises:
        HTTPException(404): if no flow run with the given id exists.
    """
    async with db.session_context(begin_transaction=True) as sess:
        deleted = await models.flow_runs.delete_flow_run(
            session=sess, flow_run_id=flow_run_id
        )
        if not deleted:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="Flow run not found"
            )
563
+
564
+
565
@router.post("/{id}/set_state")
async def set_flow_run_state(
    flow_run_id: UUID = Path(..., description="The flow run id", alias="id"),
    state: schemas.actions.StateCreate = Body(..., description="The intended state."),
    force: bool = Body(
        False,
        description=(
            "If false, orchestration rules will be applied that may alter or prevent"
            " the state transition. If True, orchestration rules are not applied."
        ),
    ),
    db: PrefectDBInterface = Depends(provide_database_interface),
    flow_policy: type[FlowRunOrchestrationPolicy] = Depends(
        orchestration_dependencies.provide_flow_policy
    ),
    orchestration_parameters: Dict[str, Any] = Depends(
        orchestration_dependencies.provide_flow_orchestration_parameters
    ),
    response: Response = None,
    api_version: str = Depends(dependencies.provide_request_api_version),
) -> OrchestrationResult:
    """Set a flow run state, invoking any orchestration rules.

    Responds 201 when the orchestration engine created a new state and 200
    otherwise (e.g. when the transition was rejected or aborted).
    """

    # pass the request version to the orchestration engine to support compatibility code
    orchestration_parameters.update({"api-version": api_version})

    # Capture the time before orchestration: a resulting state stamped at or
    # after this instant was newly created by this request.
    right_now = now("UTC")

    # create the state
    # with_for_update locks the flow run row so concurrent set_state calls
    # serialize on the same run.
    async with db.session_context(
        begin_transaction=True, with_for_update=True
    ) as session:
        orchestration_result = await models.flow_runs.set_flow_run_state(
            session=session,
            flow_run_id=flow_run_id,
            # convert to a full State object
            state=schemas.states.State.model_validate(state),
            force=force,
            flow_policy=flow_policy,
            orchestration_parameters=orchestration_parameters,
        )

    # set the 201 if a new state was created
    if orchestration_result.state and orchestration_result.state.timestamp >= right_now:
        response.status_code = status.HTTP_201_CREATED
    else:
        response.status_code = status.HTTP_200_OK

    return orchestration_result
614
+
615
+
616
@router.post("/{id}/input", status_code=status.HTTP_201_CREATED)
async def create_flow_run_input(
    flow_run_id: UUID = Path(..., description="The flow run id", alias="id"),
    key: str = Body(..., description="The input key"),
    value: bytes = Body(..., description="The value of the input"),
    sender: Optional[str] = Body(None, description="The sender of the input"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> None:
    """
    Create a key/value input for a flow run.

    Raises:
        HTTPException(409): if an input with the same key already exists for
            this flow run (unique-constraint violation).
        HTTPException(404): for any other integrity error — presumably a
            foreign-key failure meaning the flow run does not exist.
    """
    async with db.session_context() as session:
        try:
            await models.flow_run_input.create_flow_run_input(
                session=session,
                flow_run_input=schemas.core.FlowRunInput(
                    flow_run_id=flow_run_id,
                    key=key,
                    sender=sender,
                    value=value.decode(),
                ),
            )
            # Commit inside the try so constraint violations raised at flush
            # time are caught below.
            await session.commit()

        except IntegrityError as exc:
            # Distinguish duplicate-key conflicts from other integrity
            # failures by inspecting the database error message.
            if "unique constraint" in str(exc).lower():
                raise HTTPException(
                    status_code=status.HTTP_409_CONFLICT,
                    detail="A flow run input with this key already exists.",
                )
            else:
                raise HTTPException(
                    status_code=status.HTTP_404_NOT_FOUND, detail="Flow run not found"
                )
650
+
651
+
652
@router.post("/{id}/input/filter")
async def filter_flow_run_input(
    flow_run_id: UUID = Path(..., description="The flow run id", alias="id"),
    prefix: str = Body(..., description="The input key prefix", embed=True),
    limit: int = Body(
        1, description="The maximum number of results to return", embed=True
    ),
    exclude_keys: List[str] = Body(
        [], description="Exclude inputs with these keys", embed=True
    ),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> List[schemas.core.FlowRunInput]:
    """
    List a flow run's inputs whose keys start with the given prefix.
    """
    async with db.session_context() as sess:
        matches = await models.flow_run_input.filter_flow_run_input(
            session=sess,
            flow_run_id=flow_run_id,
            prefix=prefix,
            limit=limit,
            exclude_keys=exclude_keys,
        )
        return matches
675
+
676
+
677
@router.get("/{id}/input/{key}")
async def read_flow_run_input(
    flow_run_id: UUID = Path(..., description="The flow run id", alias="id"),
    key: str = Path(..., description="The input key", alias="key"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> PlainTextResponse:
    """
    Return the stored value for one flow run input as plain text.

    Raises:
        HTTPException(404): if no input exists for this flow run and key.
    """

    async with db.session_context() as sess:
        record = await models.flow_run_input.read_flow_run_input(
            session=sess, flow_run_id=flow_run_id, key=key
        )

        # Guard clause: missing record -> 404; otherwise return its raw value.
        if not record:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="Flow run input not found"
            )
        return PlainTextResponse(record.value)
698
+
699
+
700
@router.delete("/{id}/input/{key}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_flow_run_input(
    flow_run_id: UUID = Path(..., description="The flow run id", alias="id"),
    key: str = Path(..., description="The input key", alias="key"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> None:
    """
    Remove a single input from a flow run.

    Raises:
        HTTPException(404): if no input exists for this flow run and key.
    """

    async with db.session_context() as sess:
        removed = await models.flow_run_input.delete_flow_run_input(
            session=sess, flow_run_id=flow_run_id, key=key
        )
        # This endpoint manages its own commit rather than using
        # begin_transaction.
        await sess.commit()

        if not removed:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="Flow run input not found"
            )
720
+
721
+
722
@router.post("/paginate", response_class=ORJSONResponse)
async def paginate_flow_runs(
    sort: schemas.sorting.FlowRunSort = Body(schemas.sorting.FlowRunSort.ID_DESC),
    limit: int = dependencies.LimitBody(),
    page: int = Body(1, ge=1),
    flows: Optional[schemas.filters.FlowFilter] = None,
    flow_runs: Optional[schemas.filters.FlowRunFilter] = None,
    task_runs: Optional[schemas.filters.TaskRunFilter] = None,
    deployments: Optional[schemas.filters.DeploymentFilter] = None,
    work_pools: Optional[schemas.filters.WorkPoolFilter] = None,
    work_pool_queues: Optional[schemas.filters.WorkQueueFilter] = None,
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> FlowRunPaginationResponse:
    """
    Pagination query for flow runs.
    """
    # Translate 1-based page numbers into a row offset.
    offset = (page - 1) * limit

    async with db.session_context() as session:
        runs = await models.flow_runs.read_flow_runs(
            session=session,
            flow_filter=flows,
            flow_run_filter=flow_runs,
            task_run_filter=task_runs,
            deployment_filter=deployments,
            work_pool_filter=work_pools,
            work_queue_filter=work_pool_queues,
            offset=offset,
            limit=limit,
            sort=sort,
        )

        # A second query computes the unpaginated total for the page count.
        count = await models.flow_runs.count_flow_runs(
            session=session,
            flow_filter=flows,
            flow_run_filter=flow_runs,
            task_run_filter=task_runs,
            deployment_filter=deployments,
            work_pool_filter=work_pools,
            work_queue_filter=work_pool_queues,
        )

        # Instead of relying on fastapi.encoders.jsonable_encoder to convert the
        # response to JSON, we do so more efficiently ourselves.
        # In particular, the FastAPI encoder is very slow for large, nested objects.
        # See: https://github.com/tiangolo/fastapi/issues/1224
        results = [
            schemas.responses.FlowRunResponse.model_validate(
                run, from_attributes=True
            ).model_dump(mode="json")
            for run in runs
        ]

        response = FlowRunPaginationResponse(
            results=results,
            count=count,
            limit=limit,
            # Ceiling division: total pages needed to hold `count` rows.
            pages=(count + limit - 1) // limit,
            page=page,
        ).model_dump(mode="json")

        return ORJSONResponse(content=response)
784
+
785
+
786
# Number of log rows fetched per query when streaming a flow run's logs as CSV.
FLOW_RUN_LOGS_DOWNLOAD_PAGE_LIMIT = 1000
787
+
788
+
789
@router.get("/{id}/logs/download")
async def download_logs(
    flow_run_id: UUID = Path(..., description="The flow run id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> StreamingResponse:
    """
    Download all flow run logs as a CSV file, collecting all logs until there are no more logs to retrieve.

    Raises:
        HTTPException(404): if the flow run does not exist.
    """
    async with db.session_context() as session:
        flow_run = await models.flow_runs.read_flow_run(
            session=session, flow_run_id=flow_run_id
        )

        if not flow_run:
            raise HTTPException(status.HTTP_404_NOT_FOUND, detail="Flow run not found")

        async def generate():
            # Reusable in-memory buffer: each page of logs is written, yielded,
            # then truncated before the next page.
            data = io.StringIO()
            csv_writer = csv.writer(data)
            csv_writer.writerow(
                ["timestamp", "level", "flow_run_id", "task_run_id", "message"]
            )

            offset = 0
            limit = FLOW_RUN_LOGS_DOWNLOAD_PAGE_LIMIT

            # Page through the logs in timestamp order until a query returns
            # no rows.
            # NOTE(review): this generator captures `session` and runs after
            # the endpoint returns the StreamingResponse — confirm the session
            # context remains usable while the response streams.
            while True:
                results = await models.logs.read_logs(
                    session=session,
                    log_filter=schemas.filters.LogFilter(
                        flow_run_id={"any_": [flow_run_id]}
                    ),
                    offset=offset,
                    limit=limit,
                    sort=schemas.sorting.LogSort.TIMESTAMP_ASC,
                )

                if not results:
                    break

                offset += limit

                for log in results:
                    csv_writer.writerow(
                        [
                            log.timestamp,
                            log.level,
                            log.flow_run_id,
                            log.task_run_id,
                            log.message,
                        ]
                    )
                # Emit the buffered page and reset the buffer for the next one.
                data.seek(0)
                yield data.read()
                data.seek(0)
                data.truncate(0)

        return StreamingResponse(
            generate(),
            media_type="text/csv",
            headers={
                "Content-Disposition": f"attachment; filename={flow_run.name}-logs.csv"
            },
        )
853
+
854
+
855
@router.patch("/{id}/labels", status_code=status.HTTP_204_NO_CONTENT)
async def update_flow_run_labels(
    flow_run_id: UUID = Path(..., description="The flow run id", alias="id"),
    labels: Dict[str, Any] = Body(..., description="The labels to update"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> None:
    """
    Apply the given label updates to the specified flow run.
    """
    async with db.session_context(begin_transaction=True) as sess:
        await models.flow_runs.update_flow_run_labels(
            session=sess, flow_run_id=flow_run_id, labels=labels
        )