prefect-client 2.14.10__py3-none-any.whl → 2.14.11__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their respective public registries.
prefect/input/actions.py ADDED
@@ -0,0 +1,88 @@
+ from typing import TYPE_CHECKING, Any, Optional
+ from uuid import UUID
+
+ import orjson
+
+ from prefect.client.utilities import inject_client
+ from prefect.context import FlowRunContext
+ from prefect.exceptions import PrefectHTTPStatusError
+ from prefect.utilities.asyncutils import sync_compatible
+
+ if TYPE_CHECKING:
+     from prefect.client.orchestration import PrefectClient
+
+
+ def _ensure_flow_run_id(flow_run_id: Optional[UUID] = None) -> UUID:
+     if flow_run_id:
+         return flow_run_id
+
+     context = FlowRunContext.get()
+     if context is None or context.flow_run is None:
+         raise RuntimeError("Must either provide a flow run ID or be within a flow run.")
+
+     return context.flow_run.id
+
+
+ @sync_compatible
+ @inject_client
+ async def create_flow_run_input(
+     key: str,
+     value: Any,
+     flow_run_id: Optional[UUID] = None,
+     client: "PrefectClient" = None,
+ ):
+     """
+     Create a new flow run input. The given `value` will be serialized to JSON
+     and stored as a flow run input value.
+
+     Args:
+         - key (str): the flow run input key
+         - value (Any): the flow run input value
+         - flow_run_id (UUID, optional): the flow run ID. If not given, defaults
+             to pulling the flow run ID from the current context.
+     """
+     flow_run_id = _ensure_flow_run_id(flow_run_id)
+
+     await client.create_flow_run_input(
+         flow_run_id=flow_run_id, key=key, value=orjson.dumps(value).decode()
+     )
+
+
+ @sync_compatible
+ @inject_client
+ async def read_flow_run_input(
+     key: str, flow_run_id: Optional[UUID] = None, client: "PrefectClient" = None
+ ) -> Any:
+     """Read a flow run input.
+
+     Args:
+         - key (str): the flow run input key
+         - flow_run_id (UUID): the flow run ID
+     """
+     flow_run_id = _ensure_flow_run_id(flow_run_id)
+
+     try:
+         value = await client.read_flow_run_input(flow_run_id=flow_run_id, key=key)
+     except PrefectHTTPStatusError as exc:
+         if exc.response.status_code == 404:
+             return None
+         raise
+     else:
+         return orjson.loads(value)
+
+
+ @sync_compatible
+ @inject_client
+ async def delete_flow_run_input(
+     key: str, flow_run_id: Optional[UUID] = None, client: "PrefectClient" = None
+ ):
+     """Delete a flow run input.
+
+     Args:
+         - flow_run_id (UUID): the flow run ID
+         - key (str): the flow run input key
+     """
+     flow_run_id = _ensure_flow_run_id(flow_run_id)
+
+     await client.delete_flow_run_input(flow_run_id=flow_run_id, key=key)
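A minimal usage sketch (not part of the release) of the three helpers added in this file. All three are `@sync_compatible`, so a sync flow can call them directly, and the flow run ID is resolved from `FlowRunContext` when omitted. This assumes the experimental flow run input setting added in `prefect/settings.py` (see below) is enabled.

```python
from prefect import flow
from prefect.input.actions import (
    create_flow_run_input,
    delete_flow_run_input,
    read_flow_run_input,
)

@flow
def input_demo():
    # `value` is serialized with orjson before being stored
    create_flow_run_input(key="approval", value={"approved": True})

    print(read_flow_run_input(key="approval"))  # {'approved': True}
    print(read_flow_run_input(key="missing"))   # None (404s are swallowed)

    delete_flow_run_input(key="approval")

if __name__ == "__main__":
    input_demo()
```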
prefect/input/run_input.py ADDED
@@ -0,0 +1,107 @@
+ from typing import TYPE_CHECKING, Any, Dict, Literal, Optional, Type, TypeVar, Union
+ from uuid import UUID
+
+ import pydantic
+
+ from prefect._internal.pydantic import HAS_PYDANTIC_V2
+ from prefect.input.actions import create_flow_run_input, read_flow_run_input
+ from prefect.utilities.asyncutils import sync_compatible
+
+ if TYPE_CHECKING:
+     from prefect.states import State
+
+
+ if HAS_PYDANTIC_V2:
+     from prefect._internal.pydantic.v2_schema import create_v2_schema
+
+
+ T = TypeVar("T", bound="RunInput")
+ KeysetNames = Union[Literal["response"], Literal["schema"]]
+ Keyset = Dict[KeysetNames, str]
+
+
+ def keyset_from_paused_state(state: "State") -> Keyset:
+     """
+     Get the keyset for the given Paused state.
+
+     Args:
+         - state (State): the state to get the keyset for
+     """
+     if not state.is_paused():
+         raise RuntimeError(f"{state.type.value!r} is unsupported.")
+
+     return keyset_from_base_key(
+         f"{state.name.lower()}-{str(state.state_details.pause_key)}"
+     )
+
+
+ def keyset_from_base_key(base_key: str) -> Keyset:
+     """
+     Get the keyset for the given base key.
+
+     Args:
+         - base_key (str): the base key to get the keyset for
+
+     Returns:
+         - Dict[str, str]: the keyset
+     """
+     return {
+         "response": f"{base_key}-response",
+         "schema": f"{base_key}-schema",
+     }
+
+
+ class RunInput(pydantic.BaseModel):
+     class Config:
+         extra = "forbid"
+
+     title: str = "Run is asking for input"
+     description: Optional[str] = None
+
+     @classmethod
+     @sync_compatible
+     async def save(cls, keyset: Keyset, flow_run_id: Optional[UUID] = None):
+         """
+         Save the run input schema under the given keyset's "schema" key.
+
+         Args:
+             - keyset (Keyset): the keyset to save the input for
+             - flow_run_id (UUID, optional): the flow run ID to save the input for
+         """
+         if HAS_PYDANTIC_V2:
+             schema = create_v2_schema(cls.__name__, model_base=cls)
+         else:
+             schema = cls.schema(by_alias=True)
+
+         await create_flow_run_input(
+             key=keyset["schema"], value=schema, flow_run_id=flow_run_id
+         )
+
+     @classmethod
+     @sync_compatible
+     async def load(cls, keyset: Keyset, flow_run_id: Optional[UUID] = None):
+         """
+         Load the run input response from the given keyset's "response" key.
+
+         Args:
+             - keyset (Keyset): the keyset to load the input for
+             - flow_run_id (UUID, optional): the flow run ID to load the input for
+         """
+         value = await read_flow_run_input(keyset["response"], flow_run_id=flow_run_id)
+         return cls(**value)
+
+     @classmethod
+     def with_initial_data(cls: Type[T], **kwargs: Any) -> Type[T]:
+         """
+         Create a new `RunInput` subclass with the given initial data as field
+         defaults.
+
+         Args:
+             - kwargs (Any): the initial data
+         """
+         fields = {}
+         for key, value in kwargs.items():
+             fields[key] = (type(value), value)
+         return pydantic.create_model(cls.__name__, **fields, __base__=cls)
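A sketch of the `RunInput` lifecycle defined above, run against a live Prefect API. The `prefect.input` import path and the placeholder flow run ID are assumptions, not part of the diff.

```python
from uuid import UUID

from prefect.input import RunInput, keyset_from_base_key  # import path assumed

class ApprovalInput(RunInput):
    approved: bool
    reason: str = ""

keyset = keyset_from_base_key("approval")
# {'response': 'approval-response', 'schema': 'approval-schema'}

flow_run_id = UUID("00000000-0000-0000-0000-000000000000")  # placeholder

# Stores the subclass's JSON schema under the "approval-schema" input key
ApprovalInput.save(keyset, flow_run_id=flow_run_id)

# Reads the "approval-response" input and validates it into the model
submitted = ApprovalInput.load(keyset, flow_run_id=flow_run_id)

# A new subclass with defaults baked in from initial data
PrefilledInput = ApprovalInput.with_initial_data(reason="looks good")
```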
prefect/runner/runner.py CHANGED
@@ -204,6 +204,7 @@ class Runner:
          cron: Optional[str] = None,
          rrule: Optional[str] = None,
          schedule: Optional[SCHEDULE_TYPES] = None,
+         is_schedule_active: Optional[bool] = None,
          parameters: Optional[dict] = None,
          triggers: Optional[List[DeploymentTrigger]] = None,
          description: Optional[str] = None,
@@ -227,6 +228,9 @@ class Runner:
              rrule: An rrule schedule of when to execute runs of this flow.
              schedule: A schedule object of when to execute runs of this flow. Used for
                  advanced scheduling options like timezone.
+             is_schedule_active: Whether or not to set the schedule for this deployment as active. If
+                 not provided when creating a deployment, the schedule will be set as active. If not
+                 provided when updating a deployment, the schedule's activation will not be changed.
              triggers: A list of triggers that should kick off a run of this flow.
              parameters: A dictionary of default parameter values to pass to runs of this flow.
              description: A description for the created deployment. Defaults to the flow's
@@ -249,6 +253,7 @@ class Runner:
              cron=cron,
              rrule=rrule,
              schedule=schedule,
+             is_schedule_active=is_schedule_active,
              triggers=triggers,
              parameters=parameters,
              description=description,
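Illustrative use of the new flag. This sketch assumes `flow.serve`, which delegates deployment creation to this `Runner` method, exposes the same parameter in this release.

```python
from prefect import flow

@flow
def nightly_report():
    ...

if __name__ == "__main__":
    nightly_report.serve(
        name="nightly-report",
        cron="0 2 * * *",
        # New in 2.14.11: create the deployment with its schedule paused
        is_schedule_active=False,
    )
```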
prefect/runner/server.py CHANGED
@@ -1,15 +1,26 @@
+ from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple
+
  import pendulum
  import uvicorn
- from prefect._vendor.fastapi import APIRouter, FastAPI, status
+ from prefect._vendor.fastapi import APIRouter, FastAPI, HTTPException, status
  from prefect._vendor.fastapi.responses import JSONResponse

+ from prefect.client.orchestration import get_client
+ from prefect.runner.utils import inject_schemas_into_openapi
  from prefect.settings import (
+     PREFECT_EXPERIMENTAL_ENABLE_EXTRA_RUNNER_ENDPOINTS,
      PREFECT_RUNNER_POLL_FREQUENCY,
      PREFECT_RUNNER_SERVER_HOST,
      PREFECT_RUNNER_SERVER_LOG_LEVEL,
      PREFECT_RUNNER_SERVER_MISSED_POLLS_TOLERANCE,
      PREFECT_RUNNER_SERVER_PORT,
  )
+ from prefect.utilities.asyncutils import sync_compatible
+ from prefect.utilities.validation import validate_values_conform_to_schema
+
+ if TYPE_CHECKING:
+     from prefect.deployments import Deployment
+     from prefect.runner import Runner


  def perform_health_check(runner, delay_threshold: int = None) -> JSONResponse:
@@ -49,12 +60,61 @@ def shutdown(runner) -> int:
      return _shutdown


- def start_webserver(
-     runner,
-     log_level: str = None,
- ) -> None:
+ async def _build_endpoint_for_deployment(deployment: "Deployment"):
+     async def _create_flow_run_for_deployment(
+         body: Optional[Dict[Any, Any]] = None
+     ) -> JSONResponse:
+         body = body or {}
+         if deployment.enforce_parameter_schema and deployment.parameter_openapi_schema:
+             try:
+                 validate_values_conform_to_schema(
+                     body, deployment.parameter_openapi_schema
+                 )
+             except ValueError as exc:
+                 raise HTTPException(
+                     status.HTTP_400_BAD_REQUEST,
+                     detail=f"Error creating flow run: {exc}",
+                 )
+
+         async with get_client() as client:
+             flow_run = await client.create_flow_run_from_deployment(
+                 deployment_id=deployment.id, parameters=body
+             )
+         return JSONResponse(
+             status_code=status.HTTP_201_CREATED,
+             content={"flow_run_id": str(flow_run.id)},
+         )
+
+     return _create_flow_run_for_deployment
+
+
+ @sync_compatible
+ async def get_deployment_router(
+     runner: "Runner",
+ ) -> Tuple[APIRouter, Dict[str, Dict]]:
+     from prefect import get_client
+
+     router = APIRouter()
+     schemas = {}
+     async with get_client() as client:
+         for deployment_id in runner._deployment_ids:
+             deployment = await client.read_deployment(deployment_id)
+             router.add_api_route(
+                 f"/deployment/{deployment.id}/run",
+                 await _build_endpoint_for_deployment(deployment),
+                 methods=["POST"],
+             )
+
+             # Used for updating the route schemas later on
+             schemas[deployment.name] = deployment.parameter_openapi_schema
+             schemas[deployment.id] = deployment.name
+     return router, schemas
+
+
+ @sync_compatible
+ async def build_server(runner: "Runner") -> FastAPI:
      """
-     Run a FastAPI server for a runner.
+     Build a FastAPI server for a runner.

      Args:
          runner (Runner): the runner this server interacts with and monitors
@@ -68,11 +128,35 @@ def start_webserver(
      )
      router.add_api_route("/run_count", run_count(runner=runner), methods=["GET"])
      router.add_api_route("/shutdown", shutdown(runner=runner), methods=["POST"])
-
      webserver.include_router(router)

+     if PREFECT_EXPERIMENTAL_ENABLE_EXTRA_RUNNER_ENDPOINTS.value():
+         deployments_router, deployment_schemas = await get_deployment_router(runner)
+         webserver.include_router(deployments_router)
+
+         def customize_openapi():
+             if webserver.openapi_schema:
+                 return webserver.openapi_schema
+
+             openapi_schema = inject_schemas_into_openapi(webserver, deployment_schemas)
+             webserver.openapi_schema = openapi_schema
+             return webserver.openapi_schema
+
+         webserver.openapi = customize_openapi
+
+     return webserver
+
+
+ def start_webserver(runner: "Runner", log_level: Optional[str] = None) -> None:
+     """
+     Run a FastAPI server for a runner.
+
+     Args:
+         runner (Runner): the runner this server interacts with and monitors
+         log_level (str): the log level to use for the server
+     """
      host = PREFECT_RUNNER_SERVER_HOST.value()
      port = PREFECT_RUNNER_SERVER_PORT.value()
      log_level = log_level or PREFECT_RUNNER_SERVER_LOG_LEVEL.value()
-
+     webserver = build_server(runner)
      uvicorn.run(webserver, host=host, port=port, log_level=log_level)
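With `PREFECT_EXPERIMENTAL_ENABLE_EXTRA_RUNNER_ENDPOINTS=true`, each deployment served by the runner gains a `POST /deployment/{id}/run` route. A hypothetical call against it; the host, port, deployment ID, and parameter name are placeholders:

```python
# Requires a runner webserver started with
# PREFECT_EXPERIMENTAL_ENABLE_EXTRA_RUNNER_ENDPOINTS=true
import httpx

deployment_id = "00000000-0000-0000-0000-000000000000"  # placeholder

resp = httpx.post(
    f"http://localhost:8080/deployment/{deployment_id}/run",  # default host/port assumed
    json={"name": "world"},  # validated against the parameter schema when enforced
)
print(resp.status_code)            # 201 on success, 400 on a schema violation
print(resp.json()["flow_run_id"])
```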
prefect/runner/utils.py ADDED
@@ -0,0 +1,92 @@
+ from copy import deepcopy
+ from typing import Any, Dict
+
+ from prefect._vendor.fastapi import FastAPI
+ from prefect._vendor.fastapi.openapi.utils import get_openapi
+
+ from prefect import __version__ as PREFECT_VERSION
+
+
+ def inject_schemas_into_openapi(
+     webserver: FastAPI, deployment_schemas: Dict[str, Any]
+ ) -> Dict[str, Any]:
+     """
+     Augments the webserver's OpenAPI schema with additional schemas from deployments.
+
+     Args:
+         webserver: The FastAPI instance representing the webserver.
+         deployment_schemas: A dictionary of deployment schemas to integrate.
+
+     Returns:
+         The augmented OpenAPI schema dictionary.
+     """
+     openapi_schema = get_openapi(
+         title="FastAPI Prefect Runner", version=PREFECT_VERSION, routes=webserver.routes
+     )
+
+     augmented_schema = merge_definitions(deployment_schemas, openapi_schema)
+     return update_refs_to_components(augmented_schema)
+
+
+ def merge_definitions(
+     deployment_schemas: Dict[str, Any], openapi_schema: Dict[str, Any]
+ ) -> Dict[str, Any]:
+     """
+     Integrates definitions from deployment schemas into the OpenAPI components.
+
+     Args:
+         deployment_schemas: A dictionary of deployment-specific schemas.
+         openapi_schema: The base OpenAPI schema to update.
+     """
+     openapi_schema_copy = deepcopy(openapi_schema)
+     components = openapi_schema_copy.setdefault("components", {}).setdefault(
+         "schemas", {}
+     )
+     for definitions in deployment_schemas.values():
+         if "definitions" in definitions:
+             for def_name, def_schema in definitions["definitions"].items():
+                 def_schema_copy = deepcopy(def_schema)
+                 update_refs_in_schema(def_schema_copy, "#/components/schemas/")
+                 components[def_name] = def_schema_copy
+     return openapi_schema_copy
+
+
+ def update_refs_in_schema(schema_item: Any, new_ref: str) -> None:
+     """
+     Recursively replaces `$ref` with a new reference base in a schema item.
+
+     Args:
+         schema_item: A schema or part of a schema to update references in.
+         new_ref: The new base string to replace in `$ref` values.
+     """
+     if isinstance(schema_item, dict):
+         if "$ref" in schema_item:
+             schema_item["$ref"] = schema_item["$ref"].replace("#/definitions/", new_ref)
+         for value in schema_item.values():
+             update_refs_in_schema(value, new_ref)
+     elif isinstance(schema_item, list):
+         for item in schema_item:
+             update_refs_in_schema(item, new_ref)
+
+
+ def update_refs_to_components(openapi_schema: Dict[str, Any]) -> Dict[str, Any]:
+     """
+     Updates all `$ref` fields in the OpenAPI schema to reference the components section.
+
+     Args:
+         openapi_schema: The OpenAPI schema to modify `$ref` fields in.
+     """
+     for path_item in openapi_schema.get("paths", {}).values():
+         for operation in path_item.values():
+             schema = (
+                 operation.get("requestBody", {})
+                 .get("content", {})
+                 .get("application/json", {})
+                 .get("schema", {})
+             )
+             update_refs_in_schema(schema, "#/components/schemas/")
+
+     for definition in openapi_schema.get("definitions", {}).values():
+         update_refs_in_schema(definition, "#/components/schemas/")
+
+     return openapi_schema
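A quick, self-contained check of the `$ref` rewrite performed by `update_refs_in_schema` above:

```python
from prefect.runner.utils import update_refs_in_schema

schema = {
    "properties": {
        "user": {"$ref": "#/definitions/User"},
        "tags": {"type": "array", "items": {"$ref": "#/definitions/Tag"}},
    }
}
update_refs_in_schema(schema, "#/components/schemas/")

# Both nested refs now point at the OpenAPI components section
assert schema["properties"]["user"]["$ref"] == "#/components/schemas/User"
assert schema["properties"]["tags"]["items"]["$ref"] == "#/components/schemas/Tag"
```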
prefect/settings.py CHANGED
@@ -773,12 +773,6 @@ PREFECT_LOCAL_STORAGE_PATH = Setting(
  )
  """The path to a block storage directory to store things in."""

- PREFECT_DEFAULT_RESULT_STORAGE_BLOCK = Setting(
-     str,
-     default=None,
- )
- """The `block-type/block-document` slug of a block to use as the default result storage."""
-
  PREFECT_MEMO_STORE_PATH = Setting(
      Path,
      default=Path("${PREFECT_HOME}") / "memo_store.toml",
@@ -939,12 +933,12 @@ PREFECT_TASK_INTROSPECTION_WARN_THRESHOLD = Setting(
      default=10.0,
  )
  """
- Threshold time in seconds for logging a warning if task parameter introspection 
+ Threshold time in seconds for logging a warning if task parameter introspection
  exceeds this duration. Parameter introspection can be a significant performance hit
  when the parameter is a large collection object, e.g. a large dictionary or DataFrame,
- and each element needs to be inspected. See `prefect.utilities.annotations.quote` 
+ and each element needs to be inspected. See `prefect.utilities.annotations.quote`
  for more details.
- Defaults to `10.0`. 
+ Defaults to `10.0`.
  Set to `0` to disable logging the warning.
  """

@@ -1338,6 +1332,16 @@ PREFECT_EXPERIMENTAL_WARN_DEPLOYMENT_STATUS = Setting(bool, default=False)
  Whether or not to warn when deployment status is used.
  """

+ PREFECT_EXPERIMENTAL_FLOW_RUN_INPUT = Setting(bool, default=False)
+ """
+ Whether or not to enable flow run input.
+ """
+
+ PREFECT_EXPERIMENTAL_WARN_FLOW_RUN_INPUT = Setting(bool, default=True)
+ """
+ Whether or not to warn when flow run input is used.
+ """
+
  PREFECT_RUNNER_PROCESS_LIMIT = Setting(int, default=5)
  """
  Maximum number of processes a runner will execute in parallel.
@@ -1394,6 +1398,11 @@ PREFECT_WORKER_WEBSERVER_PORT = Setting(int, default=8080)
  The port the worker's webserver should bind to.
  """

+ PREFECT_EXPERIMENTAL_ENABLE_EXTRA_RUNNER_ENDPOINTS = Setting(bool, default=False)
+ """
+ Whether or not to enable experimental runner webserver endpoints.
+ """
+
  PREFECT_EXPERIMENTAL_ENABLE_ARTIFACTS = Setting(bool, default=True)
  """
  Whether or not to enable experimental Prefect artifacts.
@@ -1416,11 +1425,27 @@ Whether or not to warn when the experimental workspace dashboard is enabled.

  # Defaults -----------------------------------------------------------------------------

+ PREFECT_DEFAULT_RESULT_STORAGE_BLOCK = Setting(
+     str,
+     default=None,
+ )
+ """The `block-type/block-document` slug of a block to use as the default result storage."""
+
  PREFECT_DEFAULT_WORK_POOL_NAME = Setting(str, default=None)
  """
  The default work pool to deploy to.
  """

+ PREFECT_DEFAULT_DOCKER_BUILD_NAMESPACE = Setting(
+     str,
+     default=None,
+ )
+ """
+ The default Docker namespace to use when building images.
+
+ Can be either an organization/username or a registry URL with an organization/username.
+ """
+
  # Deprecated settings ------------------------------------------------------------------
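Like other Prefect settings, the values added here can be supplied as environment variables of the same name. A small sketch; the namespace value is illustrative:

```python
import os

# Hypothetical configuration; set before the process imports prefect:
os.environ["PREFECT_DEFAULT_DOCKER_BUILD_NAMESPACE"] = "ghcr.io/my-org"
os.environ["PREFECT_EXPERIMENTAL_FLOW_RUN_INPUT"] = "true"

from prefect.settings import PREFECT_DEFAULT_DOCKER_BUILD_NAMESPACE

print(PREFECT_DEFAULT_DOCKER_BUILD_NAMESPACE.value())  # ghcr.io/my-org
```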
prefect/utilities/dockerutils.py CHANGED
@@ -506,6 +506,37 @@ def parse_image_tag(name: str) -> Tuple[str, Optional[str]]:
      return image_name, tag


+ def split_repository_path(repository_path: str) -> Tuple[Optional[str], str]:
+     """
+     Splits a Docker repository path into its namespace and repository components.
+
+     Args:
+         repository_path: The Docker repository path to split.
+
+     Returns:
+         Tuple[Optional[str], str]: A tuple containing the namespace and repository components.
+             - namespace (Optional[str]): The Docker namespace, combining the registry and organization. None if not present.
+             - repository (str): The repository name.
+     """
+     parts = repository_path.split("/", 2)
+
+     # Check if the path includes a registry and organization or just organization/repository
+     if len(parts) == 3 or (len(parts) == 2 and ("." in parts[0] or ":" in parts[0])):
+         # Namespace includes registry and organization
+         namespace = "/".join(parts[:-1])
+         repository = parts[-1]
+     elif len(parts) == 2:
+         # Only organization/repository provided, so namespace is just the first part
+         namespace = parts[0]
+         repository = parts[1]
+     else:
+         # No namespace provided
+         namespace = None
+         repository = parts[0]
+
+     return namespace, repository
+
+
  def format_outlier_version_name(version: str):
      """
      Formats outlier docker version names to pass `packaging.version.parse` validation
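The three branches of `split_repository_path` above, exercised with illustrative values (the `prefect.utilities.dockerutils` module path is assumed from the surrounding `parse_image_tag` context):

```python
from prefect.utilities.dockerutils import split_repository_path  # module path assumed

# Registry + organization: the namespace keeps both
assert split_repository_path("registry.example.com/my-org/my-image") == (
    "registry.example.com/my-org",
    "my-image",
)

# Organization only (no "." or ":" in the first segment)
assert split_repository_path("my-org/my-image") == ("my-org", "my-image")

# Bare repository: no namespace
assert split_repository_path("my-image") == (None, "my-image")
```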
prefect/utilities/processutils.py CHANGED
@@ -6,7 +6,6 @@ import sys
  from contextlib import asynccontextmanager
  from dataclasses import dataclass
  from functools import partial
- from io import TextIOBase
  from typing import (
      IO,
      Any,
@@ -299,7 +298,11 @@ async def consume_process_output(

  async def stream_text(source: TextReceiveStream, *sinks: TextSink):
      wrapped_sinks = [
-         anyio.wrap_file(sink) if isinstance(sink, TextIOBase) else sink
+         (
+             anyio.wrap_file(sink)
+             if hasattr(sink, "write") and hasattr(sink, "flush")
+             else sink
+         )
          for sink in sinks
      ]
      async for item in source:
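The change above replaces an `isinstance(sink, TextIOBase)` check with duck typing, so file-like sinks that do not subclass `TextIOBase` are also wrapped by `anyio.wrap_file`. A hypothetical sink that only the new check accepts:

```python
import sys

class TeeSink:
    """File-like sink that is not a TextIOBase subclass."""

    def write(self, text: str) -> int:
        sys.stdout.write(text)
        return len(text)

    def flush(self) -> None:
        sys.stdout.flush()

sink = TeeSink()
# The old isinstance check would have skipped this object; the new one wraps it
assert hasattr(sink, "write") and hasattr(sink, "flush")
```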
prefect/utilities/validation.py ADDED
@@ -0,0 +1,63 @@
+ import jsonschema
+
+ from prefect.utilities.collections import remove_nested_keys
+
+
+ def validate_schema(schema: dict):
+     """
+     Validate that the provided schema is a valid json schema.
+
+     Args:
+         schema: The schema to validate.
+
+     Raises:
+         ValueError: If the provided schema is not a valid json schema.
+     """
+     try:
+         if schema is not None:
+             # Most closely matches the schemas generated by pydantic
+             jsonschema.Draft4Validator.check_schema(schema)
+     except jsonschema.SchemaError as exc:
+         raise ValueError(
+             "The provided schema is not a valid json schema. Schema error:"
+             f" {exc.message}"
+         ) from exc
+
+
+ def validate_values_conform_to_schema(
+     values: dict, schema: dict, ignore_required: bool = False
+ ):
+     """
+     Validate that the provided values conform to the provided json schema.
+
+     Args:
+         values: The values to validate.
+         schema: The schema to validate against.
+         ignore_required: Whether to ignore the required fields in the schema. Should be
+             used when a partial set of values is acceptable.
+
+     Raises:
+         ValueError: If the parameters do not conform to the schema.
+     """
+     if ignore_required:
+         schema = remove_nested_keys(["required"], schema)
+
+     try:
+         if schema is not None and values is not None:
+             jsonschema.validate(values, schema)
+     except jsonschema.ValidationError as exc:
+         if exc.json_path == "$":
+             error_message = "Validation failed."
+         else:
+             error_message = (
+                 f"Validation failed for field {exc.json_path.replace('$.', '')!r}."
+             )
+         error_message += f" Failure reason: {exc.message}"
+         raise ValueError(error_message) from exc
+     except jsonschema.SchemaError as exc:
+         raise ValueError(
+             "The provided schema is not a valid json schema. Schema error:"
+             f" {exc.message}"
+         ) from exc
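A usage sketch of the new validation helpers; the schema and values are illustrative:

```python
from prefect.utilities.validation import (
    validate_schema,
    validate_values_conform_to_schema,
)

schema = {
    "type": "object",
    "properties": {"name": {"type": "string"}},
    "required": ["name"],
}

validate_schema(schema)  # no error: a valid Draft 4 schema

validate_values_conform_to_schema({"name": "Marvin"}, schema)        # passes
validate_values_conform_to_schema({}, schema, ignore_required=True)  # passes: "required" stripped

try:
    validate_values_conform_to_schema({"name": 1}, schema)
except ValueError as exc:
    print(exc)  # Validation failed for field 'name'. Failure reason: 1 is not of type 'string'
```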
prefect/workers/utilities.py CHANGED
@@ -43,7 +43,6 @@ async def get_default_base_job_template_for_infrastructure_type(
      async with get_collections_metadata_client() as collections_client:
          try:
              worker_metadata = await collections_client.read_worker_metadata()
-
              for collection in worker_metadata.values():
                  for worker in collection.values():
                      if worker.get("type") == infra_type:
{prefect_client-2.14.10.dist-info → prefect_client-2.14.11.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: prefect-client
- Version: 2.14.10
+ Version: 2.14.11
  Summary: Workflow orchestration and management.
  Home-page: https://www.prefect.io
  Author: Prefect Technologies, Inc.