prefect-client 2.16.2__py3-none-any.whl → 2.16.3__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- prefect/_internal/concurrency/services.py +5 -0
- prefect/_internal/concurrency/threads.py +3 -0
- prefect/deployments/deployments.py +29 -6
- prefect/deployments/runner.py +15 -33
- prefect/deployments/schedules.py +37 -0
- prefect/engine.py +59 -22
- prefect/events/schemas.py +253 -43
- prefect/flows.py +26 -2
- prefect/runner/runner.py +2 -2
- prefect/server/api/collections_data/views/aggregate-worker-metadata.json +9 -2
- prefect/settings.py +34 -9
- prefect/task_engine.py +16 -8
- prefect/tasks.py +39 -4
- prefect/utilities/schema_tools/__init__.py +0 -0
- prefect/utilities/schema_tools/hydration.py +218 -0
- prefect/utilities/schema_tools/validation.py +240 -0
- {prefect_client-2.16.2.dist-info → prefect_client-2.16.3.dist-info}/METADATA +52 -49
- {prefect_client-2.16.2.dist-info → prefect_client-2.16.3.dist-info}/RECORD +21 -17
- {prefect_client-2.16.2.dist-info → prefect_client-2.16.3.dist-info}/LICENSE +0 -0
- {prefect_client-2.16.2.dist-info → prefect_client-2.16.3.dist-info}/WHEEL +0 -0
- {prefect_client-2.16.2.dist-info → prefect_client-2.16.3.dist-info}/top_level.txt +0 -0
prefect/task_engine.py
CHANGED
@@ -1,5 +1,6 @@
 from contextlib import AsyncExitStack
 from typing import (
+    Any,
     Dict,
     Iterable,
     Optional,
@@ -9,6 +10,7 @@ import anyio
 from typing_extensions import Literal

 from prefect._internal.concurrency.api import create_call, from_async, from_sync
+from prefect.client.orchestration import PrefectClient
 from prefect.client.schemas.objects import TaskRun
 from prefect.context import EngineContext
 from prefect.engine import (
@@ -30,11 +32,11 @@ async def submit_autonomous_task_run_to_engine(
     task: Task,
     task_run: TaskRun,
     task_runner: BaseTaskRunner,
-    parameters: Optional[Dict] = None,
+    parameters: Optional[Dict[str, Any]] = None,
     wait_for: Optional[Iterable[PrefectFuture]] = None,
     mapped: bool = False,
     return_type: EngineReturnType = "future",
-    client=None,
+    client: Optional[PrefectClient] = None,
 ) -> PrefectFuture:
     async with AsyncExitStack() as stack:
         parameters = parameters or {}
@@ -58,11 +60,17 @@ async def submit_autonomous_task_run_to_engine(
             task_runner=task_runner,
         )
         if task.isasync:
-
+            future_result_or_state = await from_async.wait_for_call_in_loop_thread(
+                begin_run
+            )
         else:
-
+            future_result_or_state = from_sync.wait_for_call_in_loop_thread(
+                begin_run
+            )

-
-
-
-
+        if return_type == "future":
+            await wait_for_task_runs_and_report_crashes(
+                task_run_futures=[future_result_or_state],
+                client=client,
+            )
+        return future_result_or_state
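The net effect: `submit_autonomous_task_run_to_engine` now has typed `parameters` and `client` arguments and, when `return_type` is `"future"`, waits for the run and reports crashes before returning. Below is a rough, hypothetical sketch of calling the updated signature from async code; the example task and argument values are invented, while the function, `SequentialTaskRunner`, and `get_client` are taken from this diff.

```python
# Hypothetical caller of the updated signature; the task and parameter values
# below are illustrative, not part of the released package.
from prefect import get_client, task
from prefect.task_engine import submit_autonomous_task_run_to_engine
from prefect.task_runners import SequentialTaskRunner


@task
def add(x: int, y: int) -> int:
    return x + y


async def submit_add():
    future = await submit_autonomous_task_run_to_engine(
        task=add,
        task_run=None,
        task_runner=SequentialTaskRunner(),
        parameters={"x": 1, "y": 2},  # now typed as Optional[Dict[str, Any]]
        return_type="future",
        client=get_client(),          # now typed as Optional[PrefectClient]
    )
    return future
```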
prefect/tasks.py
CHANGED
@@ -555,6 +555,7 @@ class Task(Generic[P, R]):
            the result is wrapped in a Prefect State which provides error handling.
        """
         from prefect.engine import enter_task_run_engine
+        from prefect.task_engine import submit_autonomous_task_run_to_engine
         from prefect.task_runners import SequentialTaskRunner

         # Convert the call args/kwargs to a parameter dict
@@ -568,6 +569,21 @@
             self.isasync, self.name, parameters, self.viz_return_value
         )

+        if (
+            PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING.value()
+            and not FlowRunContext.get()
+        ):
+            from prefect import get_client
+
+            return submit_autonomous_task_run_to_engine(
+                task=self,
+                task_run=None,
+                task_runner=SequentialTaskRunner(),
+                parameters=parameters,
+                return_type=return_type,
+                client=get_client(),
+            )
+
         return enter_task_run_engine(
             self,
             parameters=parameters,
@@ -792,16 +808,16 @@
             PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING.value()
             and not FlowRunContext.get()
         ):
-
+            create_autonomous_task_run_call = create_call(
                 create_autonomous_task_run, task=self, parameters=parameters
             )
             if self.isasync:
                 return from_async.wait_for_call_in_loop_thread(
-
+                    create_autonomous_task_run_call
                 )
             else:
                 return from_sync.wait_for_call_in_loop_thread(
-
+                    create_autonomous_task_run_call
                 )

         return enter_task_run_engine(
@@ -968,7 +984,7 @@
            [[11, 21], [12, 22], [13, 23]]
        """

-        from prefect.engine import enter_task_run_engine
+        from prefect.engine import begin_task_map, enter_task_run_engine

         # Convert the call args/kwargs to a parameter dict; do not apply defaults
         # since they should not be mapped over
@@ -981,6 +997,25 @@
                 "`task.map()` is not currently supported by `flow.visualize()`"
             )

+        if (
+            PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING.value()
+            and not FlowRunContext.get()
+        ):
+            map_call = create_call(
+                begin_task_map,
+                task=self,
+                parameters=parameters,
+                flow_run_context=None,
+                wait_for=wait_for,
+                return_type=return_type,
+                task_runner=None,
+                autonomous=True,
+            )
+            if self.isasync:
+                return from_async.wait_for_call_in_loop_thread(map_call)
+            else:
+                return from_sync.wait_for_call_in_loop_thread(map_call)
+
         return enter_task_run_engine(
             self,
             parameters=parameters,
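Both new branches fire only when the experimental task-scheduling setting is enabled and there is no active flow run context. A minimal sketch of what that looks like from user code follows; the example task and the `temporary_settings` usage are illustrative, while `PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING` is the setting referenced in the diff.

```python
# Minimal sketch, assuming a Prefect API is reachable for autonomous task runs.
from prefect import task
from prefect.settings import (
    PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING,
    temporary_settings,
)


@task
def double(x: int) -> int:
    return x * 2


with temporary_settings({PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING: True}):
    # No flow run context here, so these calls take the autonomous paths added above:
    # submit() routes through submit_autonomous_task_run_to_engine, and
    # map() routes through begin_task_map with autonomous=True.
    future = double.submit(2)
    futures = double.map([1, 2, 3])
```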
prefect/utilities/schema_tools/__init__.py
ADDED
File without changes (new, empty file)

prefect/utilities/schema_tools/hydration.py
ADDED
@@ -0,0 +1,218 @@
+import json
+from typing import Any, Callable, Dict, Optional
+
+from prefect._internal.pydantic import HAS_PYDANTIC_V2
+
+if HAS_PYDANTIC_V2:
+    from pydantic.v1 import BaseModel, Field
+else:
+    from pydantic import BaseModel, Field
+
+from sqlalchemy.ext.asyncio import AsyncSession
+from typing_extensions import TypeAlias
+
+from prefect.server.models.variables import read_variables
+
+
+class HydrationContext(BaseModel):
+    workspace_variables: Dict[str, str] = Field(default_factory=dict)
+    raise_on_error: bool = Field(default=False)
+
+    @classmethod
+    async def build(
+        cls,
+        session: AsyncSession,
+        raise_on_error: bool = False,
+    ) -> "HydrationContext":
+        variables = await read_variables(
+            session=session,
+        )
+        return cls(
+            workspace_variables={
+                variable.name: variable.value for variable in variables
+            },
+            raise_on_error=raise_on_error,
+        )
+
+
+Handler: TypeAlias = Callable[[Dict, HydrationContext], Any]
+PrefectKind: TypeAlias = Optional[str]
+
+_handlers: Dict[PrefectKind, Handler] = {}
+
+
+class Placeholder:
+    def __eq__(self, other):
+        return isinstance(other, type(self))
+
+    @property
+    def is_error(self) -> bool:
+        return False
+
+
+class RemoveValue(Placeholder):
+    pass
+
+
+def _remove_value(value) -> bool:
+    return isinstance(value, RemoveValue)
+
+
+class HydrationError(Placeholder, Exception):
+    def __init__(self, detail: Optional[str] = None):
+        self.detail = detail
+
+    @property
+    def is_error(self) -> bool:
+        return True
+
+    @property
+    def message(self):
+        raise NotImplementedError("Must be implemented by subclass")
+
+    def __eq__(self, other):
+        return isinstance(other, type(self)) and self.message == other.message
+
+    def __str__(self):
+        return self.message
+
+
+class KeyNotFound(HydrationError):
+    @property
+    def message(self):
+        return f"Missing '{self.key}' key in __prefect object"
+
+    @property
+    def key(self) -> str:
+        raise NotImplementedError("Must be implemented by subclass")
+
+
+class ValueNotFound(KeyNotFound):
+    @property
+    def key(self):
+        return "value"
+
+
+class VariableNameNotFound(KeyNotFound):
+    @property
+    def key(self):
+        return "variable_name"
+
+
+class InvalidJSON(HydrationError):
+    @property
+    def message(self):
+        message = "Invalid JSON"
+        if self.detail:
+            message += f": {self.detail}"
+        return message
+
+
+class WorkspaceVariableNotFound(HydrationError):
+    @property
+    def variable_name(self) -> str:
+        assert self.detail is not None
+        return self.detail
+
+    @property
+    def message(self):
+        return f"Variable '{self.detail}' not found."
+
+
+def handler(kind: PrefectKind) -> Callable:
+    def decorator(func: Handler) -> Handler:
+        _handlers[kind] = func
+        return func
+
+    return decorator
+
+
+def call_handler(kind: PrefectKind, obj: Dict, ctx: HydrationContext) -> Any:
+    if kind not in _handlers:
+        return (obj or {}).get("value", None)
+
+    res = _handlers[kind](obj, ctx)
+    if ctx.raise_on_error and isinstance(res, HydrationError):
+        raise res
+    return res
+
+
+@handler("none")
+def null_handler(obj: Dict, ctx: HydrationContext):
+    if "value" in obj:
+        # null handler is a pass through, so we want to continue to hydrate
+        return _hydrate(obj["value"], ctx)
+    else:
+        return ValueNotFound()
+
+
+@handler("json")
+def json_handler(obj: Dict, ctx: HydrationContext):
+    if "value" in obj:
+        try:
+            return json.loads(obj["value"])
+        except json.decoder.JSONDecodeError as e:
+            return InvalidJSON(detail=str(e))
+    else:
+        # If `value` is not in the object, we need special handling to help
+        # the UI. For now if an object looks like {"__prefect_kind": "json"}
+        # We will remove it from the parent object. e.x.
+        # {"a": {"__prefect_kind": "json"}} -> {}
+        # or
+        # [{"__prefect_kind": "json"}] -> []
+        return RemoveValue()
+
+
+@handler("workspace_variable")
+def workspace_variable_handler(obj: Dict, ctx: HydrationContext):
+    if "variable_name" in obj:
+        variable = obj["variable_name"]
+        if variable in ctx.workspace_variables:
+            return ctx.workspace_variables[variable]
+        else:
+            return WorkspaceVariableNotFound(detail=variable)
+    else:
+        # Special handling if `variable_name` is not in the object.
+        # If an object looks like {"__prefect_kind": "workspace_variable"}
+        # we will remove it from the parent object. e.x.
+        # {"a": {"__prefect_kind": "workspace_variable"}} -> {}
+        # or
+        # [{"__prefect_kind": "workspace_variable"}] -> []
+        # or
+        # {"__prefect_kind": "workspace_variable"} -> {}
+        return RemoveValue()
+
+
+def hydrate(obj: Dict, ctx: Optional[HydrationContext] = None):
+    res = _hydrate(obj, ctx)
+
+    if _remove_value(res):
+        return {}
+
+    return res
+
+
+def _hydrate(obj, ctx: Optional[HydrationContext] = None):
+    if ctx is None:
+        ctx = HydrationContext()
+
+    prefect_object = isinstance(obj, dict) and "__prefect_kind" in obj
+
+    if prefect_object:
+        prefect_kind = obj.get("__prefect_kind")
+        return call_handler(prefect_kind, obj, ctx)
+    else:
+        if isinstance(obj, dict):
+            return {
+                key: hydrated_value
+                for key, value in obj.items()
+                if not _remove_value(hydrated_value := _hydrate(value, ctx))
+            }
+        elif isinstance(obj, list):
+            return [
+                hydrated_element
+                for element in obj
+                if not _remove_value(hydrated_element := _hydrate(element, ctx))
+            ]
+        else:
+            return obj
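In short, `hydrate` walks a JSON-like document, dispatches any object carrying a `__prefect_kind` key to the matching handler (`none`, `json`, or `workspace_variable`), and prunes entries whose handler returns `RemoveValue`; handlers signal failures with `HydrationError` placeholders that are only raised when the context sets `raise_on_error`. A small illustrative example (the document and variable values are made up, and the module's server-side imports must be available for it to run):

```python
# Illustrative input; the workspace variable and document contents are invented.
from prefect.utilities.schema_tools.hydration import HydrationContext, hydrate

ctx = HydrationContext(workspace_variables={"region": "us-east-1"})

doc = {
    "numbers": {"__prefect_kind": "json", "value": "[1, 2, 3]"},
    "region": {"__prefect_kind": "workspace_variable", "variable_name": "region"},
    "drop_me": {"__prefect_kind": "json"},  # no "value" key -> removed from the parent
}

print(hydrate(doc, ctx))
# {'numbers': [1, 2, 3], 'region': 'us-east-1'}
```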
prefect/utilities/schema_tools/validation.py
ADDED
@@ -0,0 +1,240 @@
+from collections import defaultdict, deque
+from copy import deepcopy
+from typing import Dict, List
+
+import jsonschema
+from jsonschema.exceptions import ValidationError as JSONSchemaValidationError
+from jsonschema.validators import Draft202012Validator, create
+
+from prefect.utilities.collections import remove_nested_keys
+from prefect.utilities.schema_tools.hydration import HydrationError, Placeholder
+
+
+class CircularSchemaRefError(Exception):
+    pass
+
+
+class ValidationError(Exception):
+    pass
+
+
+PLACEHOLDERS_VALIDATOR_NAME = "_placeholders"
+
+
+def _build_validator():
+    def _applicable_validators(schema):
+        # the default implementation returns `schema.items()`
+        return {**schema, PLACEHOLDERS_VALIDATOR_NAME: None}.items()
+
+    def _placeholders(validator, _, instance, schema):
+        if isinstance(instance, HydrationError):
+            yield JSONSchemaValidationError(instance.message)
+
+    validators = dict(Draft202012Validator.VALIDATORS)
+    validators.update({PLACEHOLDERS_VALIDATOR_NAME: _placeholders})
+
+    # It is necessary to `create` a new validator instead of using `extend` because
+    # the `extend` method does not accept an `application_validators` parameter.
+    # We want `_placeholders` to be applicable always, without needing to modify
+    # the schema itself.
+    return create(
+        meta_schema=Draft202012Validator.META_SCHEMA,
+        validators=validators,
+        version="prefect",
+        type_checker=Draft202012Validator.TYPE_CHECKER,
+        format_checker=Draft202012Validator.FORMAT_CHECKER,
+        id_of=Draft202012Validator.ID_OF,
+        applicable_validators=_applicable_validators,
+    )
+
+
+_VALIDATOR = _build_validator()
+
+
+def is_valid_schema(schema: Dict, preprocess: bool = True):
+    if preprocess:
+        schema = preprocess_schema(schema)
+    try:
+        if schema is not None:
+            _VALIDATOR.check_schema(schema, format_checker=_VALIDATOR.FORMAT_CHECKER)
+    except jsonschema.SchemaError as exc:
+        raise ValueError(f"Invalid schema: {exc.message}") from exc
+
+
+def validate(
+    obj: Dict,
+    schema: Dict,
+    raise_on_error: bool = False,
+    preprocess: bool = True,
+    ignore_required: bool = False,
+) -> List[JSONSchemaValidationError]:
+    if preprocess:
+        schema = preprocess_schema(schema)
+
+    if ignore_required:
+        schema = remove_nested_keys(["required"], schema)
+
+    if raise_on_error:
+        try:
+            jsonschema.validate(obj, schema, _VALIDATOR)
+        except RecursionError:
+            raise CircularSchemaRefError
+        except JSONSchemaValidationError as exc:
+            if exc.json_path == "$":
+                error_message = "Validation failed."
+            else:
+                error_message = (
+                    f"Validation failed for field {exc.json_path.replace('$.', '')!r}."
+                )
+            error_message += f" Failure reason: {exc.message}"
+            raise ValidationError(error_message) from exc
+        return []
+    else:
+        try:
+            validator = _VALIDATOR(schema, format_checker=_VALIDATOR.FORMAT_CHECKER)
+            errors = list(validator.iter_errors(obj))
+        except RecursionError:
+            raise CircularSchemaRefError
+        return errors
+
+
+def is_valid(
+    obj: Dict,
+    schema: Dict,
+) -> bool:
+    errors = validate(obj, schema)
+    return len(errors) == 0
+
+
+def prioritize_placeholder_errors(errors):
+    errors_by_path = defaultdict(list)
+    for error in errors:
+        path_str = "->".join(str(p) for p in error.relative_path)
+        errors_by_path[path_str].append(error)
+
+    filtered_errors = []
+    for path, grouped_errors in errors_by_path.items():
+        placeholders_errors = [
+            error
+            for error in grouped_errors
+            if error.validator == PLACEHOLDERS_VALIDATOR_NAME
+        ]
+
+        if placeholders_errors:
+            filtered_errors.extend(placeholders_errors)
+        else:
+            filtered_errors.extend(grouped_errors)
+
+    return filtered_errors
+
+
+def build_error_obj(errors: List[JSONSchemaValidationError]) -> Dict:
+    error_response: dict = {"errors": []}
+
+    # If multiple errors are present for the same path and one of them
+    # is a placeholder error, we want only want to use the placeholder error.
+    errors = prioritize_placeholder_errors(errors)
+
+    for error in errors:
+        # If the Placeholder is not representing an error, we can skip it
+        if isinstance(error.instance, Placeholder) and not error.instance.is_error:
+            continue
+
+        path = deque(error.relative_path)
+
+        # Required errors should be moved one level down to the property
+        # they're associated with, so we add an extra level to the path.
+        if error.validator == "required":
+            required_field = error.message.split(" ")[0].strip("'")
+            path.append(required_field)
+
+        current = error_response["errors"]
+
+        # error at the root, just append the error message
+        if not path:
+            current.append(error.message)
+
+        while path:
+            part = path.popleft()
+            if isinstance(part, int):
+                if not path:
+                    current.append({"index": part, "errors": [error.message]})
+                else:
+                    for entry in current:
+                        if entry.get("index") == part:
+                            current = entry["errors"]
+                            break
+                    else:
+                        new_entry = {"index": part, "errors": []}
+                        current.append(new_entry)
+                        current = new_entry["errors"]
+            else:
+                if not path:
+                    current.append({"property": part, "errors": [error.message]})
+                else:
+                    for entry in current:
+                        if entry.get("property") == part:
+                            current = entry.get("errors", [])
+                            break
+                    else:
+                        new_entry = {"property": part, "errors": []}
+                        current.append(new_entry)
+                        current = new_entry["errors"]
+
+    valid = len(error_response["errors"]) == 0
+    error_response["valid"] = valid
+
+    return error_response
+
+
+def _fix_null_typing(key: str, schema: Dict, required_fields: List[str]):
+    """
+    Pydantic V1 does not generate a valid Draft2020-12 schema for null types.
+    """
+    if (
+        key not in required_fields
+        and "type" in schema
+        and schema.get("type") != "null"
+        and "default" not in schema
+    ):
+        schema["anyOf"] = [{"type": schema["type"]}, {"type": "null"}]
+        del schema["type"]
+
+
+def _fix_tuple_items(schema: Dict):
+    """
+    Pydantic V1 does not generate a valid Draft2020-12 schema for tuples.
+    """
+    if (
+        schema.get("items")
+        and isinstance(schema["items"], list)
+        and not schema.get("prefixItems")
+    ):
+        schema["prefixItems"] = deepcopy(schema["items"])
+        del schema["items"]
+
+
+def process_properties(properties, required_fields):
+    for key, schema in properties.items():
+        _fix_null_typing(key, schema, required_fields)
+        _fix_tuple_items(schema)
+
+        if "properties" in schema:
+            required_fields = schema.get("required", [])
+            process_properties(schema["properties"], required_fields)
+
+
+def preprocess_schema(schema):
+    schema = deepcopy(schema)
+
+    if "properties" in schema:
+        required_fields = schema.get("required", [])
+        process_properties(schema["properties"], required_fields)
+
+    if "definitions" in schema:  # Also process definitions for reused models
+        for definition in schema["definitions"].values():
+            if "properties" in definition:
+                required_fields = definition.get("required", [])
+                process_properties(definition["properties"], required_fields)
+
+    return schema
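Here, `validate` first preprocesses the schema to smooth over Pydantic V1 quirks (optional fields gain a `null` alternative, tuple `items` become `prefixItems`), then runs a Draft 2020-12 validator extended with the `_placeholders` check so hydration errors embedded in the values surface as validation errors; `build_error_obj` nests the resulting messages by property and index. A rough usage sketch with an invented schema (exact error wording comes from `jsonschema` and may differ):

```python
# Invented schema and input, for illustration only.
from prefect.utilities.schema_tools.validation import build_error_obj, validate

schema = {
    "type": "object",
    "properties": {"x": {"type": "integer"}, "name": {"type": "string"}},
    "required": ["x"],
}

errors = validate({}, schema)  # "x" is required but missing
print(build_error_obj(errors))
# Roughly: {'errors': [{'property': 'x', 'errors': ["'x' is a required property"]}],
#           'valid': False}
```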