prefect-client 3.2.2__py3-none-any.whl → 3.2.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50) hide show
  1. prefect/__init__.py +15 -8
  2. prefect/_build_info.py +5 -0
  3. prefect/client/orchestration/__init__.py +16 -5
  4. prefect/main.py +0 -2
  5. prefect/server/api/__init__.py +34 -0
  6. prefect/server/api/admin.py +85 -0
  7. prefect/server/api/artifacts.py +224 -0
  8. prefect/server/api/automations.py +239 -0
  9. prefect/server/api/block_capabilities.py +25 -0
  10. prefect/server/api/block_documents.py +164 -0
  11. prefect/server/api/block_schemas.py +153 -0
  12. prefect/server/api/block_types.py +211 -0
  13. prefect/server/api/clients.py +246 -0
  14. prefect/server/api/collections.py +75 -0
  15. prefect/server/api/concurrency_limits.py +286 -0
  16. prefect/server/api/concurrency_limits_v2.py +269 -0
  17. prefect/server/api/csrf_token.py +38 -0
  18. prefect/server/api/dependencies.py +196 -0
  19. prefect/server/api/deployments.py +941 -0
  20. prefect/server/api/events.py +300 -0
  21. prefect/server/api/flow_run_notification_policies.py +120 -0
  22. prefect/server/api/flow_run_states.py +52 -0
  23. prefect/server/api/flow_runs.py +867 -0
  24. prefect/server/api/flows.py +210 -0
  25. prefect/server/api/logs.py +43 -0
  26. prefect/server/api/middleware.py +73 -0
  27. prefect/server/api/root.py +35 -0
  28. prefect/server/api/run_history.py +170 -0
  29. prefect/server/api/saved_searches.py +99 -0
  30. prefect/server/api/server.py +891 -0
  31. prefect/server/api/task_run_states.py +52 -0
  32. prefect/server/api/task_runs.py +342 -0
  33. prefect/server/api/task_workers.py +31 -0
  34. prefect/server/api/templates.py +35 -0
  35. prefect/server/api/ui/__init__.py +3 -0
  36. prefect/server/api/ui/flow_runs.py +128 -0
  37. prefect/server/api/ui/flows.py +173 -0
  38. prefect/server/api/ui/schemas.py +63 -0
  39. prefect/server/api/ui/task_runs.py +175 -0
  40. prefect/server/api/validation.py +382 -0
  41. prefect/server/api/variables.py +181 -0
  42. prefect/server/api/work_queues.py +230 -0
  43. prefect/server/api/workers.py +656 -0
  44. prefect/settings/sources.py +18 -5
  45. {prefect_client-3.2.2.dist-info → prefect_client-3.2.4.dist-info}/METADATA +10 -15
  46. {prefect_client-3.2.2.dist-info → prefect_client-3.2.4.dist-info}/RECORD +48 -10
  47. {prefect_client-3.2.2.dist-info → prefect_client-3.2.4.dist-info}/WHEEL +1 -2
  48. prefect/_version.py +0 -21
  49. prefect_client-3.2.2.dist-info/top_level.txt +0 -1
  50. {prefect_client-3.2.2.dist-info → prefect_client-3.2.4.dist-info/licenses}/LICENSE +0 -0
@@ -0,0 +1,210 @@
1
+ """
2
+ Routes for interacting with flow objects.
3
+ """
4
+
5
+ from typing import List, Optional
6
+ from uuid import UUID
7
+
8
+ from fastapi import Depends, HTTPException, Path, Response, status
9
+ from fastapi.param_functions import Body
10
+
11
+ import prefect.server.api.dependencies as dependencies
12
+ import prefect.server.models as models
13
+ import prefect.server.schemas as schemas
14
+ from prefect.server.database import PrefectDBInterface, provide_database_interface
15
+ from prefect.server.schemas.responses import FlowPaginationResponse
16
+ from prefect.server.utilities.server import PrefectRouter
17
+ from prefect.types._datetime import now
18
+
19
+ router: PrefectRouter = PrefectRouter(prefix="/flows", tags=["Flows"])
20
+
21
+
22
@router.post("/")
async def create_flow(
    flow: schemas.actions.FlowCreate,
    response: Response,
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> schemas.core.Flow:
    """Gracefully creates a new flow from the provided schema. If a flow with the
    same name already exists, the existing flow is returned.
    """
    # Expand the create-action payload into the full core model before persisting.
    flow = schemas.core.Flow(**flow.model_dump())

    # Capture a timestamp before the insert so a freshly created row can be
    # told apart from a pre-existing one returned by the idempotent create.
    creation_cutoff = now("UTC")

    async with db.session_context(begin_transaction=True) as session:
        model = await models.flows.create_flow(session=session, flow=flow)

    # A `created` time at or after the cutoff means the row was just inserted,
    # so report 201 Created instead of the default 200.
    if model.created >= creation_cutoff:
        response.status_code = status.HTTP_201_CREATED

    return model
44
@router.patch("/{id}", status_code=status.HTTP_204_NO_CONTENT)
async def update_flow(
    flow: schemas.actions.FlowUpdate,
    flow_id: UUID = Path(..., description="The flow id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> None:
    """
    Updates a flow.
    """
    async with db.session_context(begin_transaction=True) as session:
        # The model layer reports whether any row matched the id; raising
        # inside the transaction context rolls the update back on 404.
        was_updated = await models.flows.update_flow(
            session=session, flow=flow, flow_id=flow_id
        )
        if not was_updated:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="Flow not found"
            )
62
+
63
@router.post("/count")
async def count_flows(
    flows: Optional[schemas.filters.FlowFilter] = None,
    flow_runs: Optional[schemas.filters.FlowRunFilter] = None,
    task_runs: Optional[schemas.filters.TaskRunFilter] = None,
    deployments: Optional[schemas.filters.DeploymentFilter] = None,
    work_pools: Optional[schemas.filters.WorkPoolFilter] = None,
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> int:
    """
    Count flows.

    Each filter is optional; `None` means "do not restrict on this dimension".
    """
    # NOTE: annotations were previously bare filter types with a `None`
    # default; they are now `Optional[...]` to match `paginate_flows` and the
    # actual accepted values. Runtime behavior is unchanged.
    async with db.session_context() as session:
        return await models.flows.count_flows(
            session=session,
            flow_filter=flows,
            flow_run_filter=flow_runs,
            task_run_filter=task_runs,
            deployment_filter=deployments,
            work_pool_filter=work_pools,
        )
84
+
85
+
86
@router.get("/name/{name}")
async def read_flow_by_name(
    name: str = Path(..., description="The name of the flow"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> schemas.core.Flow:
    """
    Get a flow by name.
    """
    async with db.session_context() as session:
        matching_flow = await models.flows.read_flow_by_name(
            session=session, name=name
        )
        # Missing flows surface as a 404 rather than a null body.
        if not matching_flow:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="Flow not found"
            )
        return matching_flow
101
+
102
+
103
@router.get("/{id}")
async def read_flow(
    flow_id: UUID = Path(..., description="The flow id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> schemas.core.Flow:
    """
    Get a flow by id.
    """
    async with db.session_context() as session:
        matching_flow = await models.flows.read_flow(
            session=session, flow_id=flow_id
        )
        # Missing flows surface as a 404 rather than a null body.
        if not matching_flow:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="Flow not found"
            )
        return matching_flow
118
+
119
+
120
@router.post("/filter")
async def read_flows(
    limit: int = dependencies.LimitBody(),
    offset: int = Body(0, ge=0),
    flows: Optional[schemas.filters.FlowFilter] = None,
    flow_runs: Optional[schemas.filters.FlowRunFilter] = None,
    task_runs: Optional[schemas.filters.TaskRunFilter] = None,
    deployments: Optional[schemas.filters.DeploymentFilter] = None,
    work_pools: Optional[schemas.filters.WorkPoolFilter] = None,
    sort: schemas.sorting.FlowSort = Body(schemas.sorting.FlowSort.NAME_ASC),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> List[schemas.core.Flow]:
    """
    Query for flows.

    Each filter is optional; `None` means "do not restrict on this dimension".
    """
    # NOTE: annotations were previously bare filter types with a `None`
    # default; they are now `Optional[...]` to match `paginate_flows` and the
    # actual accepted values. Runtime behavior is unchanged.
    async with db.session_context() as session:
        return await models.flows.read_flows(
            session=session,
            flow_filter=flows,
            flow_run_filter=flow_runs,
            task_run_filter=task_runs,
            deployment_filter=deployments,
            work_pool_filter=work_pools,
            sort=sort,
            offset=offset,
            limit=limit,
        )
147
+
148
+
149
@router.delete("/{id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_flow(
    flow_id: UUID = Path(..., description="The flow id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> None:
    """
    Delete a flow by id.
    """
    async with db.session_context(begin_transaction=True) as session:
        # The model layer reports whether any row matched the id.
        was_deleted = await models.flows.delete_flow(
            session=session, flow_id=flow_id
        )
        if not was_deleted:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="Flow not found"
            )
163
+
164
+
165
@router.post("/paginate")
async def paginate_flows(
    limit: int = dependencies.LimitBody(),
    page: int = Body(1, ge=1),
    flows: Optional[schemas.filters.FlowFilter] = None,
    flow_runs: Optional[schemas.filters.FlowRunFilter] = None,
    task_runs: Optional[schemas.filters.TaskRunFilter] = None,
    deployments: Optional[schemas.filters.DeploymentFilter] = None,
    work_pools: Optional[schemas.filters.WorkPoolFilter] = None,
    sort: schemas.sorting.FlowSort = Body(schemas.sorting.FlowSort.NAME_ASC),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> FlowPaginationResponse:
    """
    Pagination query for flows.
    """
    # Translate the 1-based page number into a row offset.
    offset = (page - 1) * limit

    # The same filters feed both the page query and the total count.
    shared_filters = dict(
        flow_filter=flows,
        flow_run_filter=flow_runs,
        task_run_filter=task_runs,
        deployment_filter=deployments,
        work_pool_filter=work_pools,
    )

    async with db.session_context() as session:
        results = await models.flows.read_flows(
            session=session,
            sort=sort,
            offset=offset,
            limit=limit,
            **shared_filters,
        )

        count = await models.flows.count_flows(
            session=session,
            **shared_filters,
        )

    return FlowPaginationResponse(
        results=results,
        count=count,
        limit=limit,
        # ceil(count / limit) without importing math
        pages=(count + limit - 1) // limit,
        page=page,
    )
@@ -0,0 +1,43 @@
1
+ """
2
+ Routes for interacting with log objects.
3
+ """
4
+
5
+ from typing import List
6
+
7
+ from fastapi import Body, Depends, status
8
+
9
+ import prefect.server.api.dependencies as dependencies
10
+ import prefect.server.models as models
11
+ import prefect.server.schemas as schemas
12
+ from prefect.server.database import PrefectDBInterface, provide_database_interface
13
+ from prefect.server.utilities.server import PrefectRouter
14
+
15
+ router: PrefectRouter = PrefectRouter(prefix="/logs", tags=["Logs"])
16
+
17
+
18
@router.post("/", status_code=status.HTTP_201_CREATED)
async def create_logs(
    logs: List[schemas.actions.LogCreate],
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> None:
    """Create new logs from the provided schema."""
    # Persist the logs batch by batch, committing each batch in its own
    # transaction so one oversized request does not hold a session open.
    for log_batch in models.logs.split_logs_into_batches(logs):
        async with db.session_context(begin_transaction=True) as session:
            await models.logs.create_logs(session=session, logs=log_batch)
27
+
28
+
29
@router.post("/filter")
async def read_logs(
    limit: int = dependencies.LimitBody(),
    offset: int = Body(0, ge=0),
    logs: schemas.filters.LogFilter = None,
    sort: schemas.sorting.LogSort = Body(schemas.sorting.LogSort.TIMESTAMP_ASC),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> List[schemas.core.Log]:
    """
    Query for logs.
    """
    # A `None` filter matches all logs; sort/offset/limit shape the page.
    async with db.session_context() as session:
        matching_logs = await models.logs.read_logs(
            session=session, log_filter=logs, offset=offset, limit=limit, sort=sort
        )
        return matching_logs
@@ -0,0 +1,73 @@
1
+ from typing import Awaitable, Callable
2
+
3
+ from fastapi import status
4
+ from starlette.middleware.base import BaseHTTPMiddleware
5
+ from starlette.requests import Request
6
+ from starlette.responses import JSONResponse, Response
7
+
8
+ from prefect import settings
9
+ from prefect.server import models
10
+ from prefect.server.database import provide_database_interface
11
+
12
+ NextMiddlewareFunction = Callable[[Request], Awaitable[Response]]
13
+
14
+
15
class CsrfMiddleware(BaseHTTPMiddleware):
    """
    Middleware for CSRF protection. This middleware will check for a CSRF token
    in the headers of any POST, PUT, PATCH, or DELETE request. If the token is
    not present or does not match the token stored in the database for the
    client, the request will be rejected with a 403 status code.
    """

    async def dispatch(
        self, request: Request, call_next: NextMiddlewareFunction
    ) -> Response:
        """
        Check state-changing requests for a CSRF token/client header pair and
        reject the request with a 403 response when the pair is missing or
        does not match the token stored for that client.
        """

        # Only mutating HTTP verbs need CSRF protection; reads pass through.
        is_mutating_request = request.method in {
            "POST",
            "PUT",
            "PATCH",
            "DELETE",
        }

        if (
            settings.PREFECT_SERVER_CSRF_PROTECTION_ENABLED.value()
            and is_mutating_request
        ):
            token_header = request.headers.get("Prefect-Csrf-Token")
            client_header = request.headers.get("Prefect-Csrf-Client")

            # Both headers must be present before consulting the database.
            if token_header is None:
                return JSONResponse(
                    {"detail": "Missing CSRF token."},
                    status_code=status.HTTP_403_FORBIDDEN,
                )

            if client_header is None:
                return JSONResponse(
                    {"detail": "Missing client identifier."},
                    status_code=status.HTTP_403_FORBIDDEN,
                )

            # Compare the presented token against the one issued to this client.
            db = provide_database_interface()
            async with db.session_context() as session:
                stored_token = await models.csrf_token.read_token_for_client(
                    session=session, client=client_header
                )

            if stored_token is None or stored_token.token != token_header:
                return JSONResponse(
                    {"detail": "Invalid CSRF token or client identifier."},
                    status_code=status.HTTP_403_FORBIDDEN,
                    headers={"Access-Control-Allow-Origin": "*"},
                )

        return await call_next(request)
@@ -0,0 +1,35 @@
1
+ """
2
+ Contains the `hello` route for testing and healthcheck purposes.
3
+ """
4
+
5
+ from fastapi import Depends, status
6
+ from fastapi.responses import JSONResponse
7
+
8
+ from prefect.server.database import PrefectDBInterface, provide_database_interface
9
+ from prefect.server.utilities.server import PrefectRouter
10
+
11
+ router: PrefectRouter = PrefectRouter(prefix="", tags=["Root"])
12
+
13
+
14
@router.get("/hello")
async def hello() -> str:
    """Say hello!"""
    # Trivial liveness endpoint: always returns a constant greeting.
    return "👋"
18
+
19
+
20
@router.get("/ready")
async def perform_readiness_check(
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> JSONResponse:
    # Readiness reduces to one question: can we reach the database?
    if await db.is_db_connectable():
        return JSONResponse(
            status_code=status.HTTP_200_OK,
            content={"message": "OK"},
        )

    # Database unreachable: report 503 so orchestrators stop routing traffic.
    return JSONResponse(
        status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
        content={"message": "Database is not available"},
    )
@@ -0,0 +1,170 @@
1
+ """
2
+ Utilities for querying flow and task run history.
3
+ """
4
+
5
+ import datetime
6
+ import json
7
+ from typing import TYPE_CHECKING, List, Optional
8
+
9
+ import pydantic
10
+ import sqlalchemy as sa
11
+ from typing_extensions import Literal
12
+
13
+ import prefect.server.models as models
14
+ import prefect.server.schemas as schemas
15
+ from prefect.logging import get_logger
16
+ from prefect.server.database import PrefectDBInterface, db_injector
17
+ from prefect.types import DateTime
18
+
19
+ if TYPE_CHECKING:
20
+ import logging
21
+
22
+ logger: "logging.Logger" = get_logger("server.api")
23
+
24
+
25
@db_injector
async def run_history(
    db: PrefectDBInterface,
    session: sa.orm.Session,
    run_type: Literal["flow_run", "task_run"],
    history_start: DateTime,
    history_end: DateTime,
    history_interval: datetime.timedelta,
    flows: Optional[schemas.filters.FlowFilter] = None,
    flow_runs: Optional[schemas.filters.FlowRunFilter] = None,
    task_runs: Optional[schemas.filters.TaskRunFilter] = None,
    deployments: Optional[schemas.filters.DeploymentFilter] = None,
    work_pools: Optional[schemas.filters.WorkPoolFilter] = None,
    work_queues: Optional[schemas.filters.WorkQueueFilter] = None,
) -> List[schemas.responses.HistoryResponse]:
    """
    Produce a history of runs aggregated by interval and state

    Args:
        db: database interface (injected by `@db_injector`)
        session: an open SQLAlchemy session to execute the query against
        run_type: which run table to aggregate, "flow_run" or "task_run"
        history_start: start of the overall history window
        history_end: end of the overall history window
        history_interval: width of each aggregation bucket; must be at least
            one second
        flows: optional flow filter applied to the runs before aggregation
        flow_runs: optional flow-run filter
        task_runs: optional task-run filter
        deployments: optional deployment filter
        work_pools: optional work-pool filter
        work_queues: optional work-queue filter

    Returns:
        One `HistoryResponse` per interval (capped at 500), each carrying the
        per-state aggregates of runs whose expected start time falls in that
        interval.

    Raises:
        ValueError: if `history_interval` is under one second, or `run_type`
            is not one of the recognized values.
    """

    # SQLite has issues with very small intervals
    # (by 0.001 seconds it stops incrementing the interval)
    if history_interval < datetime.timedelta(seconds=1):
        raise ValueError("History interval must not be less than 1 second.")

    # prepare run-specific models
    if run_type == "flow_run":
        run_model = db.FlowRun
        run_filter_function = models.flow_runs._apply_flow_run_filters
    elif run_type == "task_run":
        run_model = db.TaskRun
        run_filter_function = models.task_runs._apply_task_run_filters
    else:
        raise ValueError(
            f"Unknown run type {run_type!r}. Expected 'flow_run' or 'task_run'."
        )

    # create a CTE for timestamp intervals
    intervals = db.queries.make_timestamp_intervals(
        history_start,
        history_end,
        history_interval,
    ).cte("intervals")

    # apply filters to the flow runs (and related states)
    runs = (
        await run_filter_function(
            db,
            sa.select(
                run_model.id,
                run_model.expected_start_time,
                run_model.estimated_run_time,
                run_model.estimated_start_time_delta,
                run_model.state_type,
                run_model.state_name,
            ).select_from(run_model),
            flow_filter=flows,
            flow_run_filter=flow_runs,
            task_run_filter=task_runs,
            deployment_filter=deployments,
            work_pool_filter=work_pools,
            work_queue_filter=work_queues,
        )
    ).alias("runs")
    # outer join intervals to the filtered runs to create a dataset composed of
    # every interval and the aggregate of all its runs. The runs aggregate is represented
    # by a descriptive JSON object
    counts = (
        sa.select(
            intervals.c.interval_start,
            intervals.c.interval_end,
            # build a JSON object, ignoring the case where the count of runs is 0
            sa.case(
                (sa.func.count(runs.c.id) == 0, None),
                else_=db.queries.build_json_object(
                    "state_type",
                    runs.c.state_type,
                    "state_name",
                    runs.c.state_name,
                    "count_runs",
                    sa.func.count(runs.c.id),
                    # estimated run times only includes positive run times (to avoid any unexpected corner cases)
                    "sum_estimated_run_time",
                    sa.func.sum(
                        sa.func.greatest(
                            0, sa.extract("epoch", runs.c.estimated_run_time)
                        )
                    ),
                    # estimated lateness is the sum of any positive start time deltas
                    "sum_estimated_lateness",
                    sa.func.sum(
                        sa.func.greatest(
                            0, sa.extract("epoch", runs.c.estimated_start_time_delta)
                        )
                    ),
                ),
            ).label("state_agg"),
        )
        .select_from(intervals)
        .join(
            runs,
            sa.and_(
                runs.c.expected_start_time >= intervals.c.interval_start,
                runs.c.expected_start_time < intervals.c.interval_end,
            ),
            isouter=True,
        )
        .group_by(
            intervals.c.interval_start,
            intervals.c.interval_end,
            runs.c.state_type,
            runs.c.state_name,
        )
    ).alias("counts")

    # aggregate all state-aggregate objects into a single array for each interval,
    # ensuring that intervals with no runs have an empty array
    query = (
        sa.select(
            counts.c.interval_start,
            counts.c.interval_end,
            sa.func.coalesce(
                db.queries.json_arr_agg(
                    db.queries.cast_to_json(counts.c.state_agg)
                ).filter(counts.c.state_agg.is_not(None)),
                sa.literal("[]", literal_execute=True),
            ).label("states"),
        )
        .group_by(counts.c.interval_start, counts.c.interval_end)
        .order_by(counts.c.interval_start)
        # return no more than 500 bars
        .limit(500)
    )

    # issue the query
    result = await session.execute(query)
    records = result.mappings()

    # load and parse the record if the database returns JSON as strings
    # (dialect-dependent — e.g. SQLite returns JSON aggregates as text)
    if db.queries.uses_json_strings:
        records = [dict(r) for r in records]
        for r in records:
            r["states"] = json.loads(r["states"])

    return pydantic.TypeAdapter(
        List[schemas.responses.HistoryResponse]
    ).validate_python(records)
@@ -0,0 +1,99 @@
1
+ """
2
+ Routes for interacting with saved search objects.
3
+ """
4
+
5
+ from typing import List
6
+ from uuid import UUID
7
+
8
+ from fastapi import Body, Depends, HTTPException, Path, Response, status
9
+
10
+ import prefect.server.api.dependencies as dependencies
11
+ import prefect.server.models as models
12
+ import prefect.server.schemas as schemas
13
+ from prefect.server.database import PrefectDBInterface, provide_database_interface
14
+ from prefect.server.utilities.server import PrefectRouter
15
+ from prefect.types._datetime import now
16
+
17
+ router: PrefectRouter = PrefectRouter(prefix="/saved_searches", tags=["SavedSearches"])
18
+
19
+
20
@router.put("/")
async def create_saved_search(
    saved_search: schemas.actions.SavedSearchCreate,
    response: Response,
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> schemas.core.SavedSearch:
    """Gracefully creates a new saved search from the provided schema.

    If a saved search with the same name already exists, the saved search's fields are
    replaced.
    """

    # Expand the create-action payload into the full core model.
    saved_search = schemas.core.SavedSearch(**saved_search.model_dump())

    # Capture a timestamp before the upsert so a freshly created row can be
    # told apart from an existing one whose fields were replaced.
    creation_cutoff = now("UTC")

    async with db.session_context(begin_transaction=True) as session:
        model = await models.saved_searches.create_saved_search(
            session=session, saved_search=saved_search
        )

    # A `created` time at or after the cutoff means a brand-new row: 201.
    if model.created >= creation_cutoff:
        response.status_code = status.HTTP_201_CREATED

    return model
46
+
47
+
48
@router.get("/{id}")
async def read_saved_search(
    saved_search_id: UUID = Path(..., description="The saved search id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> schemas.core.SavedSearch:
    """
    Get a saved search by id.
    """
    async with db.session_context() as session:
        match = await models.saved_searches.read_saved_search(
            session=session, saved_search_id=saved_search_id
        )
        # Missing searches surface as a 404 rather than a null body.
        if not match:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="Saved search not found"
            )
        return match
65
+
66
+
67
@router.post("/filter")
async def read_saved_searches(
    limit: int = dependencies.LimitBody(),
    offset: int = Body(0, ge=0),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> List[schemas.core.SavedSearch]:
    """
    Query for saved searches.
    """
    # No filters are supported for saved searches; this is a plain page read.
    async with db.session_context() as session:
        page = await models.saved_searches.read_saved_searches(
            session=session,
            offset=offset,
            limit=limit,
        )
        return page
82
+
83
+
84
@router.delete("/{id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_saved_search(
    saved_search_id: UUID = Path(..., description="The saved search id", alias="id"),
    db: PrefectDBInterface = Depends(provide_database_interface),
) -> None:
    """
    Delete a saved search by id.
    """
    async with db.session_context(begin_transaction=True) as session:
        # The model layer reports whether any row matched the id.
        was_deleted = await models.saved_searches.delete_saved_search(
            session=session, saved_search_id=saved_search_id
        )
        if not was_deleted:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail="Saved search not found"
            )