datamodel-code-generator 0.27.2__py3-none-any.whl → 0.28.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of datamodel-code-generator might be problematic.
- datamodel_code_generator/__init__.py +168 -196
- datamodel_code_generator/__main__.py +146 -189
- datamodel_code_generator/arguments.py +227 -230
- datamodel_code_generator/format.py +77 -129
- datamodel_code_generator/http.py +12 -10
- datamodel_code_generator/imports.py +59 -65
- datamodel_code_generator/model/__init__.py +28 -31
- datamodel_code_generator/model/base.py +100 -144
- datamodel_code_generator/model/dataclass.py +62 -70
- datamodel_code_generator/model/enum.py +34 -30
- datamodel_code_generator/model/imports.py +13 -11
- datamodel_code_generator/model/msgspec.py +116 -138
- datamodel_code_generator/model/pydantic/__init__.py +18 -28
- datamodel_code_generator/model/pydantic/base_model.py +121 -140
- datamodel_code_generator/model/pydantic/custom_root_type.py +2 -2
- datamodel_code_generator/model/pydantic/dataclass.py +6 -4
- datamodel_code_generator/model/pydantic/imports.py +35 -33
- datamodel_code_generator/model/pydantic/types.py +91 -119
- datamodel_code_generator/model/pydantic_v2/__init__.py +21 -18
- datamodel_code_generator/model/pydantic_v2/base_model.py +118 -127
- datamodel_code_generator/model/pydantic_v2/imports.py +5 -3
- datamodel_code_generator/model/pydantic_v2/root_model.py +6 -6
- datamodel_code_generator/model/pydantic_v2/types.py +11 -7
- datamodel_code_generator/model/rootmodel.py +1 -1
- datamodel_code_generator/model/scalar.py +33 -32
- datamodel_code_generator/model/typed_dict.py +41 -51
- datamodel_code_generator/model/types.py +24 -19
- datamodel_code_generator/model/union.py +21 -17
- datamodel_code_generator/parser/__init__.py +16 -12
- datamodel_code_generator/parser/base.py +327 -515
- datamodel_code_generator/parser/graphql.py +87 -119
- datamodel_code_generator/parser/jsonschema.py +438 -607
- datamodel_code_generator/parser/openapi.py +180 -220
- datamodel_code_generator/pydantic_patch.py +8 -9
- datamodel_code_generator/reference.py +199 -297
- datamodel_code_generator/types.py +149 -215
- datamodel_code_generator/util.py +23 -36
- {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/METADATA +10 -5
- datamodel_code_generator-0.28.0.dist-info/RECORD +59 -0
- datamodel_code_generator-0.27.2.dist-info/RECORD +0 -59
- {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/WHEEL +0 -0
- {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/entry_points.txt +0 -0
- {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/licenses/LICENSE +0 -0
datamodel_code_generator/reference.py (0.27.2 → 0.28.0)

@@ -1,32 +1,15 @@
+from __future__ import annotations
+
 import re
 from collections import defaultdict
 from contextlib import contextmanager
 from enum import Enum, auto
-from functools import lru_cache
+from functools import cached_property, lru_cache
 from itertools import zip_longest
 from keyword import iskeyword
 from pathlib import Path, PurePath
-from typing import (
-    TYPE_CHECKING,
-    AbstractSet,
-    Any,
-    Callable,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    Generator,
-    List,
-    Mapping,
-    NamedTuple,
-    Optional,
-    Pattern,
-    Sequence,
-    Set,
-    Tuple,
-    Type,
-    TypeVar,
-    Union,
-)
+from re import Pattern
+from typing import TYPE_CHECKING, Any, Callable, ClassVar, NamedTuple, Optional, TypeVar
 from urllib.parse import ParseResult, urlparse
 
 import inflect
@@ -34,20 +17,18 @@ import pydantic
 from packaging import version
 from pydantic import BaseModel
 
-from datamodel_code_generator.util import (
-    PYDANTIC_V2,
-    ConfigDict,
-    cached_property,
-    model_validator,
-)
+from datamodel_code_generator.util import PYDANTIC_V2, ConfigDict, model_validator
 
 if TYPE_CHECKING:
+    from collections.abc import Generator, Mapping, Sequence
+    from collections.abc import Set as AbstractSet
+
     from pydantic.typing import DictStrAny
 
 
 class _BaseModel(BaseModel):
-    _exclude_fields: ClassVar[Set[str]] = set()
-    _pass_fields: ClassVar[Set[str]] = set()
+    _exclude_fields: ClassVar[set[str]] = set()
+    _pass_fields: ClassVar[set[str]] = set()
 
     if not TYPE_CHECKING:
 
@@ -60,20 +41,16 @@ class _BaseModel(BaseModel):
     if not TYPE_CHECKING:
         if PYDANTIC_V2:
 
-            def dict(
+            def dict(  # noqa: PLR0913
                 self,
                 *,
-                include: Union[
-                    AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], None
-                ] = None,
-                exclude: Union[
-                    AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], None
-                ] = None,
+                include: AbstractSet[int | str] | Mapping[int | str, Any] | None = None,
+                exclude: AbstractSet[int | str] | Mapping[int | str, Any] | None = None,
                 by_alias: bool = False,
                 exclude_unset: bool = False,
                 exclude_defaults: bool = False,
                 exclude_none: bool = False,
-            ) -> 'DictStrAny':
+            ) -> DictStrAny:
                 return self.model_dump(
                     include=include,
                     exclude=set(exclude or ()) | self._exclude_fields,
@@ -85,21 +62,17 @@ class _BaseModel(BaseModel):
 
         else:
 
-            def dict(
+            def dict(  # noqa: PLR0913
                 self,
                 *,
-                include: Union[
-                    AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], None
-                ] = None,
-                exclude: Union[
-                    AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], None
-                ] = None,
+                include: AbstractSet[int | str] | Mapping[int | str, Any] | None = None,
+                exclude: AbstractSet[int | str] | Mapping[int | str, Any] | None = None,
                 by_alias: bool = False,
-                skip_defaults: Optional[bool] = None,
+                skip_defaults: bool | None = None,
                 exclude_unset: bool = False,
                 exclude_defaults: bool = False,
                 exclude_none: bool = False,
-            ) -> 'DictStrAny':
+            ) -> DictStrAny:
                 return super().dict(
                     include=include,
                     exclude=set(exclude or ()) | self._exclude_fields,
@@ -113,63 +86,57 @@ class _BaseModel(BaseModel):
 
 class Reference(_BaseModel):
     path: str
-    original_name: str = ''
+    original_name: str = ""
     name: str
-    duplicate_name: Optional[str] = None
+    duplicate_name: Optional[str] = None  # noqa: UP045
     loaded: bool = True
-    source: Optional[Any] = None
-    children: List[Any] = []
-    _exclude_fields: ClassVar[Set[str]] = {'children'}
+    source: Optional[Any] = None  # noqa: UP045
+    children: list[Any] = []
+    _exclude_fields: ClassVar[set[str]] = {"children"}
 
-    @model_validator(mode='before')
-    def validate_original_name(cls, values: Any) -> Any:
+    @model_validator(mode="before")
+    def validate_original_name(cls, values: Any) -> Any:  # noqa: N805
        """
        If original_name is empty then, `original_name` is assigned `name`
        """
        if not isinstance(values, dict):  # pragma: no cover
            return values
-        original_name = values.get('original_name')
+        original_name = values.get("original_name")
        if original_name:
            return values
 
-        values['original_name'] = values.get('name', original_name)
+        values["original_name"] = values.get("name", original_name)
        return values
 
    if PYDANTIC_V2:
        # TODO[pydantic]: The following keys were removed: `copy_on_model_validation`.
        # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
-        model_config = ConfigDict(  # pyright: ignore
+        model_config = ConfigDict(  # pyright: ignore[reportAssignmentType]
            arbitrary_types_allowed=True,
            ignored_types=(cached_property,),
-            revalidate_instances='never',
+            revalidate_instances="never",
        )
    else:
 
        class Config:
            arbitrary_types_allowed = True
            keep_untouched = (cached_property,)
-            copy_on_model_validation = (
-                False
-                if version.parse(pydantic.VERSION) < version.parse('1.9.2')
-                else 'none'
-            )
+            copy_on_model_validation = False if version.parse(pydantic.VERSION) < version.parse("1.9.2") else "none"
 
    @property
    def short_name(self) -> str:
-        return self.name.rsplit('.', 1)[-1]
+        return self.name.rsplit(".", 1)[-1]
 
 
-SINGULAR_NAME_SUFFIX: str = 'Item'
+SINGULAR_NAME_SUFFIX: str = "Item"
 
-ID_PATTERN: Pattern[str] = re.compile(r'^#[^/].*')
+ID_PATTERN: Pattern[str] = re.compile(r"^#[^/].*")
 
-T = TypeVar('T')
+T = TypeVar("T")
 
 
 @contextmanager
-def context_variable(
-    setter: Callable[[T], None], current_value: T, new_value: T
-) -> Generator[None, None, None]:
+def context_variable(setter: Callable[[T], None], current_value: T, new_value: T) -> Generator[None, None, None]:
    previous_value: T = current_value
    setter(new_value)
    try:
@@ -178,83 +145,75 @@ def context_variable(
        setter(previous_value)
 
 
-_UNDER_SCORE_1: Pattern[str] = re.compile(r'([^_])([A-Z][a-z]+)')
-_UNDER_SCORE_2: Pattern[str] = re.compile('([a-z0-9])([A-Z])')
+_UNDER_SCORE_1: Pattern[str] = re.compile(r"([^_])([A-Z][a-z]+)")
+_UNDER_SCORE_2: Pattern[str] = re.compile(r"([a-z0-9])([A-Z])")
 
 
 @lru_cache
 def camel_to_snake(string: str) -> str:
-    subbed = _UNDER_SCORE_1.sub(r'\1_\2', string)
-    return _UNDER_SCORE_2.sub(r'\1_\2', subbed).lower()
+    subbed = _UNDER_SCORE_1.sub(r"\1_\2", string)
+    return _UNDER_SCORE_2.sub(r"\1_\2", subbed).lower()
 
 
 class FieldNameResolver:
-    def __init__(
+    def __init__(  # noqa: PLR0913, PLR0917
        self,
-        aliases: Optional[Mapping[str, str]] = None,
-        snake_case_field: bool = False,
-        empty_field_name: Optional[str] = None,
-        original_delimiter: Optional[str] = None,
-        special_field_name_prefix: Optional[str] = None,
-        remove_special_field_name_prefix: bool = False,
-        capitalise_enum_members: bool = False,
-        no_alias: bool = False,
-    ):
+        aliases: Mapping[str, str] | None = None,
+        snake_case_field: bool = False,  # noqa: FBT001, FBT002
+        empty_field_name: str | None = None,
+        original_delimiter: str | None = None,
+        special_field_name_prefix: str | None = None,
+        remove_special_field_name_prefix: bool = False,  # noqa: FBT001, FBT002
+        capitalise_enum_members: bool = False,  # noqa: FBT001, FBT002
+        no_alias: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
        self.aliases: Mapping[str, str] = {} if aliases is None else {**aliases}
-        self.empty_field_name: str = empty_field_name or '_'
+        self.empty_field_name: str = empty_field_name or "_"
        self.snake_case_field = snake_case_field
-        self.original_delimiter: Optional[str] = original_delimiter
-        self.special_field_name_prefix: Optional[str] = (
-            'field' if special_field_name_prefix is None else special_field_name_prefix
+        self.original_delimiter: str | None = original_delimiter
+        self.special_field_name_prefix: str | None = (
+            "field" if special_field_name_prefix is None else special_field_name_prefix
        )
        self.remove_special_field_name_prefix: bool = remove_special_field_name_prefix
        self.capitalise_enum_members: bool = capitalise_enum_members
        self.no_alias = no_alias
 
    @classmethod
-    def _validate_field_name(cls, field_name: str) -> bool:
+    def _validate_field_name(cls, field_name: str) -> bool:  # noqa: ARG003
        return True
 
-    def get_valid_name(
+    def get_valid_name(  # noqa: PLR0912
        self,
        name: str,
-        excludes: Optional[Set[str]] = None,
-        ignore_snake_case_field: bool = False,
-        upper_camel: bool = False,
+        excludes: set[str] | None = None,
+        ignore_snake_case_field: bool = False,  # noqa: FBT001, FBT002
+        upper_camel: bool = False,  # noqa: FBT001, FBT002
    ) -> str:
        if not name:
            name = self.empty_field_name
-        if name[0] == '#':
+        if name[0] == "#":
            name = name[1:] or self.empty_field_name
 
-        if (
-            self.snake_case_field
-            and not ignore_snake_case_field
-            and self.original_delimiter is not None
-        ):
+        if self.snake_case_field and not ignore_snake_case_field and self.original_delimiter is not None:
            name = snake_to_upper_camel(name, delimiter=self.original_delimiter)
 
-        name = re.sub(r'[¹²³⁴⁵⁶⁷⁸⁹]|\W', '_', name)
+        name = re.sub(r"[¹²³⁴⁵⁶⁷⁸⁹]|\W", "_", name)
        if name[0].isnumeric():
-            name = f'{self.special_field_name_prefix}_{name}'
+            name = f"{self.special_field_name_prefix}_{name}"
 
        # We should avoid having a field begin with an underscore, as it
        # causes pydantic to consider it as private
-        while name.startswith('_'):
+        while name.startswith("_"):
            if self.remove_special_field_name_prefix:
                name = name[1:]
            else:
-                name = f'{self.special_field_name_prefix}{name}'
+                name = f"{self.special_field_name_prefix}{name}"
                break
-        if (
-            self.capitalise_enum_members
-            or self.snake_case_field
-            and not ignore_snake_case_field
-        ):
+        if self.capitalise_enum_members or (self.snake_case_field and not ignore_snake_case_field):
            name = camel_to_snake(name)
        count = 1
        if iskeyword(name) or not self._validate_field_name(name):
-            name += '_'
+            name += "_"
        if upper_camel:
            new_name = snake_to_upper_camel(name)
        elif self.capitalise_enum_members:
@@ -266,13 +225,13 @@ class FieldNameResolver:
            or iskeyword(new_name)
            or (excludes and new_name in excludes)
        ):
-            new_name = f'{name}{count}' if upper_camel else f'{name}_{count}'
+            new_name = f"{name}{count}" if upper_camel else f"{name}_{count}"
            count += 1
        return new_name
 
    def get_valid_field_name_and_alias(
-        self, field_name: str, excludes: Optional[Set[str]] = None
-    ) -> Tuple[str, Optional[str]]:
+        self, field_name: str, excludes: set[str] | None = None
+    ) -> tuple[str, str | None]:
        if field_name in self.aliases:
            return self.aliases[field_name], field_name
        valid_name = self.get_valid_name(field_name, excludes=excludes)
@@ -293,13 +252,13 @@ class EnumFieldNameResolver(FieldNameResolver):
    def get_valid_name(
        self,
        name: str,
-        excludes: Optional[Set[str]] = None,
-        ignore_snake_case_field: bool = False,
-        upper_camel: bool = False,
+        excludes: set[str] | None = None,
+        ignore_snake_case_field: bool = False,  # noqa: FBT001, FBT002
+        upper_camel: bool = False,  # noqa: FBT001, FBT002
    ) -> str:
        return super().get_valid_name(
-            name='mro_' if name == 'mro' else name,
-            excludes={'mro'} | (excludes or set()),
+            name="mro_" if name == "mro" else name,
+            excludes={"mro"} | (excludes or set()),
            ignore_snake_case_field=ignore_snake_case_field,
            upper_camel=upper_camel,
        )
@@ -311,7 +270,7 @@ class ModelType(Enum):
    CLASS = auto()
 
 
-DEFAULT_FIELD_NAME_RESOLVERS: Dict[ModelType, Type[FieldNameResolver]] = {
+DEFAULT_FIELD_NAME_RESOLVERS: dict[ModelType, type[FieldNameResolver]] = {
    ModelType.ENUM: EnumFieldNameResolver,
    ModelType.PYDANTIC: PydanticFieldNameResolver,
    ModelType.CLASS: FieldNameResolver,
@@ -320,16 +279,16 @@ DEFAULT_FIELD_NAME_RESOLVERS: Dict[ModelType, Type[FieldNameResolver]] = {
 
 class ClassName(NamedTuple):
    name: str
-    duplicate_name: Optional[str]
+    duplicate_name: str | None
 
 
 def get_relative_path(base_path: PurePath, target_path: PurePath) -> PurePath:
    if base_path == target_path:
-        return Path('.')
+        return Path()
    if not target_path.is_absolute():
        return target_path
    parent_count: int = 0
-    children: List[str] = []
+    children: list[str] = []
    for base_part, target_part in zip_longest(base_path.parts, target_path.parts):
        if base_part == target_part and not parent_count:
            continue
@@ -337,48 +296,44 @@ def get_relative_path(base_path: PurePath, target_path: PurePath) -> PurePath:
            parent_count += 1
        if target_part:
            children.append(target_part)
-    return Path(*['..' for _ in range(parent_count)], *children)
+    return Path(*[".." for _ in range(parent_count)], *children)
 
 
-class ModelResolver:
-    def __init__(
+class ModelResolver:  # noqa: PLR0904
+    def __init__(  # noqa: PLR0913, PLR0917
        self,
-        exclude_names: Optional[Set[str]] = None,
-        duplicate_name_suffix: Optional[str] = None,
-        base_url: Optional[str] = None,
-        singular_name_suffix: Optional[str] = None,
-        aliases: Optional[Mapping[str, str]] = None,
-        snake_case_field: bool = False,
-        empty_field_name: Optional[str] = None,
-        custom_class_name_generator: Optional[Callable[[str], str]] = None,
-        base_path: Optional[Path] = None,
-        field_name_resolver_classes: Optional[
-            Dict[ModelType, Type[FieldNameResolver]]
-        ] = None,
-        original_field_name_delimiter: Optional[str] = None,
-        special_field_name_prefix: Optional[str] = None,
-        remove_special_field_name_prefix: bool = False,
-        capitalise_enum_members: bool = False,
-        no_alias: bool = False,
+        exclude_names: set[str] | None = None,
+        duplicate_name_suffix: str | None = None,
+        base_url: str | None = None,
+        singular_name_suffix: str | None = None,
+        aliases: Mapping[str, str] | None = None,
+        snake_case_field: bool = False,  # noqa: FBT001, FBT002
+        empty_field_name: str | None = None,
+        custom_class_name_generator: Callable[[str], str] | None = None,
+        base_path: Path | None = None,
+        field_name_resolver_classes: dict[ModelType, type[FieldNameResolver]] | None = None,
+        original_field_name_delimiter: str | None = None,
+        special_field_name_prefix: str | None = None,
+        remove_special_field_name_prefix: bool = False,  # noqa: FBT001, FBT002
+        capitalise_enum_members: bool = False,  # noqa: FBT001, FBT002
+        no_alias: bool = False,  # noqa: FBT001, FBT002
    ) -> None:
-        self.references: Dict[str, Reference] = {}
+        self.references: dict[str, Reference] = {}
        self._current_root: Sequence[str] = []
-        self._root_id: Optional[str] = None
-        self._root_id_base_path: Optional[str] = None
-        self.ids: DefaultDict[str, Dict[str, str]] = defaultdict(dict)
-        self.after_load_files: Set[str] = set()
-        self.exclude_names: Set[str] = exclude_names or set()
-        self.duplicate_name_suffix: Optional[str] = duplicate_name_suffix
-        self._base_url: Optional[str] = base_url
+        self._root_id: str | None = None
+        self._root_id_base_path: str | None = None
+        self.ids: defaultdict[str, dict[str, str]] = defaultdict(dict)
+        self.after_load_files: set[str] = set()
+        self.exclude_names: set[str] = exclude_names or set()
+        self.duplicate_name_suffix: str | None = duplicate_name_suffix
+        self._base_url: str | None = base_url
        self.singular_name_suffix: str = (
-            singular_name_suffix
-            if isinstance(singular_name_suffix, str)
-            else SINGULAR_NAME_SUFFIX
+            singular_name_suffix if isinstance(singular_name_suffix, str) else SINGULAR_NAME_SUFFIX
        )
        merged_field_name_resolver_classes = DEFAULT_FIELD_NAME_RESOLVERS.copy()
        if field_name_resolver_classes:  # pragma: no cover
            merged_field_name_resolver_classes.update(field_name_resolver_classes)
-        self.field_name_resolvers: Dict[ModelType, FieldNameResolver] = {
+        self.field_name_resolvers: dict[ModelType, FieldNameResolver] = {
            k: v(
                aliases=aliases,
                snake_case_field=snake_case_field,
@@ -386,42 +341,34 @@ class ModelResolver:
                original_delimiter=original_field_name_delimiter,
                special_field_name_prefix=special_field_name_prefix,
                remove_special_field_name_prefix=remove_special_field_name_prefix,
-                capitalise_enum_members=capitalise_enum_members
-                if k == ModelType.ENUM
-                else False,
+                capitalise_enum_members=capitalise_enum_members if k == ModelType.ENUM else False,
                no_alias=no_alias,
            )
            for k, v in merged_field_name_resolver_classes.items()
        }
-        self.class_name_generator = (
-            custom_class_name_generator or self.default_class_name_generator
-        )
+        self.class_name_generator = custom_class_name_generator or self.default_class_name_generator
        self._base_path: Path = base_path or Path.cwd()
-        self._current_base_path: Optional[Path] = self._base_path
+        self._current_base_path: Path | None = self._base_path
 
    @property
-    def current_base_path(self) -> Optional[Path]:
+    def current_base_path(self) -> Path | None:
        return self._current_base_path
 
-    def set_current_base_path(self, base_path: Optional[Path]) -> None:
+    def set_current_base_path(self, base_path: Path | None) -> None:
        self._current_base_path = base_path
 
    @property
-    def base_url(self) -> Optional[str]:
+    def base_url(self) -> str | None:
        return self._base_url
 
-    def set_base_url(self, base_url: Optional[str]) -> None:
+    def set_base_url(self, base_url: str | None) -> None:
        self._base_url = base_url
 
    @contextmanager
-    def current_base_path_context(
-        self, base_path: Optional[Path]
-    ) -> Generator[None, None, None]:
+    def current_base_path_context(self, base_path: Path | None) -> Generator[None, None, None]:
        if base_path:
            base_path = (self._base_path / base_path).resolve()
-        with context_variable(
-            self.set_current_base_path, self.current_base_path, base_path
-        ):
+        with context_variable(self.set_current_base_path, self.current_base_path, base_path):
            yield
 
    @contextmanager
@@ -442,80 +389,66 @@ class ModelResolver:
        self._current_root = current_root
 
    @contextmanager
-    def current_root_context(
-        self, current_root: Sequence[str]
-    ) -> Generator[None, None, None]:
+    def current_root_context(self, current_root: Sequence[str]) -> Generator[None, None, None]:
        with context_variable(self.set_current_root, self.current_root, current_root):
            yield
 
    @property
-    def root_id(self) -> Optional[str]:
+    def root_id(self) -> str | None:
        return self._root_id
 
    @property
-    def root_id_base_path(self) -> Optional[str]:
+    def root_id_base_path(self) -> str | None:
        return self._root_id_base_path
 
-    def set_root_id(self, root_id: Optional[str]) -> None:
-        if root_id and '/' in root_id:
-            self._root_id_base_path = root_id.rsplit('/', 1)[0]
+    def set_root_id(self, root_id: str | None) -> None:
+        if root_id and "/" in root_id:
+            self._root_id_base_path = root_id.rsplit("/", 1)[0]
        else:
            self._root_id_base_path = None
 
        self._root_id = root_id
 
    def add_id(self, id_: str, path: Sequence[str]) -> None:
-        self.ids['/'.join(self.current_root)][id_] = self.resolve_ref(path)
+        self.ids["/".join(self.current_root)][id_] = self.resolve_ref(path)
 
-    def resolve_ref(self, path: Union[Sequence[str], str]) -> str:
-        if isinstance(path, str):
-            joined_path = path
-        else:
-            joined_path = self.join_path(path)
-        if joined_path == '#':
-            return f'{"/".join(self.current_root)}#'
-        if (
-            self.current_base_path
-            and not self.base_url
-            and joined_path[0] != '#'
-            and not is_url(joined_path)
-        ):
+    def resolve_ref(self, path: Sequence[str] | str) -> str:  # noqa: PLR0911, PLR0912
+        joined_path = path if isinstance(path, str) else self.join_path(path)
+        if joined_path == "#":
+            return f"{'/'.join(self.current_root)}#"
+        if self.current_base_path and not self.base_url and joined_path[0] != "#" and not is_url(joined_path):
            # resolve local file path
-            file_path, *object_part = joined_path.split('#', 1)
+            file_path, *object_part = joined_path.split("#", 1)
            resolved_file_path = Path(self.current_base_path, file_path).resolve()
-            joined_path = get_relative_path(
-                self._base_path, resolved_file_path
-            ).as_posix()
+            joined_path = get_relative_path(self._base_path, resolved_file_path).as_posix()
            if object_part:
-                joined_path += f'#{object_part[0]}'
+                joined_path += f"#{object_part[0]}"
        if ID_PATTERN.match(joined_path):
-            ref: str = self.ids['/'.join(self.current_root)][joined_path]
+            ref: str = self.ids["/".join(self.current_root)][joined_path]
        else:
-            if '#' not in joined_path:
-                joined_path += '#'
-            elif joined_path[0] == '#':
-                joined_path = f'{"/".join(self.current_root)}{joined_path}'
-
-            delimiter = joined_path.index('#')
-            file_path = ''.join(joined_path[:delimiter])
-            ref = f'{"".join(joined_path[:delimiter])}#{"".join(joined_path[delimiter + 1 :])}'
-            if self.root_id_base_path and not (
-                is_url(joined_path) or Path(self._base_path, file_path).is_file()
-            ):
-                ref = f'{self.root_id_base_path}/{ref}'
+            if "#" not in joined_path:
+                joined_path += "#"
+            elif joined_path[0] == "#":
+                joined_path = f"{'/'.join(self.current_root)}{joined_path}"
+
+            delimiter = joined_path.index("#")
+            file_path = "".join(joined_path[:delimiter])
+            ref = f"{''.join(joined_path[:delimiter])}#{''.join(joined_path[delimiter + 1 :])}"
+            if self.root_id_base_path and not (is_url(joined_path) or Path(self._base_path, file_path).is_file()):
+                ref = f"{self.root_id_base_path}/{ref}"
 
        if self.base_url:
-            from .http import join_url
+            from .http import join_url  # noqa: PLC0415
 
            joined_url = join_url(self.base_url, ref)
-            if '#' in joined_url:
+            if "#" in joined_url:
                return joined_url
-            return f'{joined_url}#'
+            return f"{joined_url}#"
 
        if is_url(ref):
-            file_part, path_part = ref.split('#', 1)
+            file_part, path_part = ref.split("#", 1)
            if file_part == self.root_id:
-                return f'{"/".join(self.current_root)}#{path_part}'
+                return f"{'/'.join(self.current_root)}#{path_part}"
            target_url: ParseResult = urlparse(file_part)
            if not (self.root_id and self.current_base_path):
                return ref
@@ -525,64 +458,47 @@ class ModelResolver:
                root_id_url.netloc,
            ):  # pragma: no cover
                target_url_path = Path(target_url.path)
-                relative_target_base = get_relative_path(
-                    Path(root_id_url.path).parent, target_url_path.parent
-                )
-                target_path = (
-                    self.current_base_path / relative_target_base / target_url_path.name
-                )
+                relative_target_base = get_relative_path(Path(root_id_url.path).parent, target_url_path.parent)
+                target_path = self.current_base_path / relative_target_base / target_url_path.name
                if target_path.exists():
-                    return f'{target_path.resolve().relative_to(self._base_path)}#{path_part}'
+                    return f"{target_path.resolve().relative_to(self._base_path)}#{path_part}"
 
        return ref
 
    def is_after_load(self, ref: str) -> bool:
        if is_url(ref) or not self.current_base_path:
            return False
-        file_part, *_ = ref.split('#', 1)
+        file_part, *_ = ref.split("#", 1)
        absolute_path = Path(self._base_path, file_part).resolve().as_posix()
-        if self.is_external_root_ref(ref):
-            return absolute_path in self.after_load_files
-        elif self.is_external_ref(ref):
+        if self.is_external_root_ref(ref) or self.is_external_ref(ref):
            return absolute_path in self.after_load_files
        return False  # pragma: no cover
 
    @staticmethod
    def is_external_ref(ref: str) -> bool:
-        return '#' in ref and ref[0] != '#'
+        return "#" in ref and ref[0] != "#"
 
    @staticmethod
    def is_external_root_ref(ref: str) -> bool:
-        return ref[-1] == '#'
+        return ref[-1] == "#"
 
    @staticmethod
    def join_path(path: Sequence[str]) -> str:
-        joined_path = '/'.join(p for p in path if p).replace('/#', '#')
-        if '#' not in joined_path:
-            joined_path += '#'
+        joined_path = "/".join(p for p in path if p).replace("/#", "#")
+        if "#" not in joined_path:
+            joined_path += "#"
        return joined_path
 
-    def add_ref(self, ref: str, resolved: bool = False) -> Reference:
-        if not resolved:
-            path = self.resolve_ref(ref)
-        else:
-            path = ref
+    def add_ref(self, ref: str, resolved: bool = False) -> Reference:  # noqa: FBT001, FBT002
+        path = self.resolve_ref(ref) if not resolved else ref
        reference = self.references.get(path)
        if reference:
            return reference
-        split_ref = ref.rsplit('/', 1)
+        split_ref = ref.rsplit("/", 1)
        if len(split_ref) == 1:
-            original_name = Path(
-                split_ref[0].rstrip('#')
-                if self.is_external_root_ref(path)
-                else split_ref[0]
-            ).stem
+            original_name = Path(split_ref[0].rstrip("#") if self.is_external_root_ref(path) else split_ref[0]).stem
        else:
-            original_name = (
-                Path(split_ref[1].rstrip('#')).stem
-                if self.is_external_root_ref(path)
-                else split_ref[1]
-            )
+            original_name = Path(split_ref[1].rstrip("#")).stem if self.is_external_root_ref(path) else split_ref[1]
        name = self.get_class_name(original_name, unique=False).name
        reference = Reference(
            path=path,
@@ -594,7 +510,7 @@ class ModelResolver:
        self.references[path] = reference
        return reference
 
-    def add(
+    def add(  # noqa: PLR0913
        self,
        path: Sequence[str],
        original_name: str,
@@ -602,22 +518,18 @@ class ModelResolver:
        class_name: bool = False,
        singular_name: bool = False,
        unique: bool = True,
-        singular_name_suffix: Optional[str] = None,
+        singular_name_suffix: str | None = None,
        loaded: bool = False,
    ) -> Reference:
        joined_path = self.join_path(path)
-        reference: Optional[Reference] = self.references.get(joined_path)
+        reference: Reference | None = self.references.get(joined_path)
        if reference:
            if loaded and not reference.loaded:
                reference.loaded = True
-            if (
-                not original_name
-                or original_name == reference.original_name
-                or original_name == reference.name
-            ):
+            if not original_name or original_name in {reference.original_name, reference.name}:
                return reference
        name = original_name
-        duplicate_name: Optional[str] = None
+        duplicate_name: str | None = None
        if class_name:
            name, duplicate_name = self.get_class_name(
                name=name,
@@ -630,9 +542,7 @@ class ModelResolver:
            # TODO: create a validate for module name
            name = self.get_valid_field_name(name, model_type=ModelType.CLASS)
            if singular_name:  # pragma: no cover
-                name = get_singular_name(
-                    name, singular_name_suffix or self.singular_name_suffix
-                )
+                name = get_singular_name(name, singular_name_suffix or self.singular_name_suffix)
            elif unique:  # pragma: no cover
                unique_name = self._get_unique_name(name)
                if unique_name == name:
@@ -654,10 +564,10 @@ class ModelResolver:
        self.references[joined_path] = reference
        return reference
 
-    def get(self, path: Union[Sequence[str], str]) -> Optional[Reference]:
+    def get(self, path: Sequence[str] | str) -> Reference | None:
        return self.references.get(self.resolve_ref(path))
 
-    def delete(self, path: Union[Sequence[str], str]) -> None:
+    def delete(self, path: Sequence[str] | str) -> None:
        if self.resolve_ref(path) in self.references:
            del self.references[self.resolve_ref(path)]
 
@@ -670,33 +580,29 @@ class ModelResolver:
    def get_class_name(
        self,
        name: str,
-        unique: bool = True,
-        reserved_name: Optional[str] = None,
-        singular_name: bool = False,
-        singular_name_suffix: Optional[str] = None,
+        unique: bool = True,  # noqa: FBT001, FBT002
+        reserved_name: str | None = None,
+        singular_name: bool = False,  # noqa: FBT001, FBT002
+        singular_name_suffix: str | None = None,
    ) -> ClassName:
-        if '.' in name:
-            split_name = name.split('.')
-            prefix = '.'.join(
+        if "." in name:
+            split_name = name.split(".")
+            prefix = ".".join(
                # TODO: create a validate for class name
-                self.field_name_resolvers[ModelType.CLASS].get_valid_name(
-                    n, ignore_snake_case_field=True
-                )
+                self.field_name_resolvers[ModelType.CLASS].get_valid_name(n, ignore_snake_case_field=True)
                for n in split_name[:-1]
            )
-            prefix += '.'
+            prefix += "."
            class_name = split_name[-1]
        else:
-            prefix = ''
+            prefix = ""
            class_name = name
 
        class_name = self.class_name_generator(class_name)
 
        if singular_name:
-            class_name = get_singular_name(
-                class_name, singular_name_suffix or self.singular_name_suffix
-            )
-        duplicate_name: Optional[str] = None
+            class_name = get_singular_name(class_name, singular_name_suffix or self.singular_name_suffix)
+        duplicate_name: str | None = None
        if unique:
            if reserved_name == class_name:
                return ClassName(name=class_name, duplicate_name=duplicate_name)
@@ -705,24 +611,22 @@ class ModelResolver:
            if unique_name != class_name:
                duplicate_name = class_name
                class_name = unique_name
-        return ClassName(name=f'{prefix}{class_name}', duplicate_name=duplicate_name)
+        return ClassName(name=f"{prefix}{class_name}", duplicate_name=duplicate_name)
 
-    def _get_unique_name(self, name: str, camel: bool = False) -> str:
+    def _get_unique_name(self, name: str, camel: bool = False) -> str:  # noqa: FBT001, FBT002
        unique_name: str = name
        count: int = 1
-        reference_names = {
-            r.name for r in self.references.values()
-        } | self.exclude_names
+        reference_names = {r.name for r in self.references.values()} | self.exclude_names
        while unique_name in reference_names:
            if self.duplicate_name_suffix:
-                name_parts: List[Union[str, int]] = [
+                name_parts: list[str | int] = [
                    name,
                    self.duplicate_name_suffix,
                    count - 1,
                ]
            else:
                name_parts = [name, count]
-            delimiter = '' if camel else '_'
+            delimiter = "" if camel else "_"
            unique_name = delimiter.join(str(p) for p in name_parts if p)
            count += 1
        return unique_name
@@ -734,7 +638,7 @@ class ModelResolver:
    def get_valid_field_name(
        self,
        name: str,
-        excludes: Optional[Set[str]] = None,
+        excludes: set[str] | None = None,
        model_type: ModelType = ModelType.PYDANTIC,
    ) -> str:
        return self.field_name_resolvers[model_type].get_valid_name(name, excludes)
@@ -742,34 +646,32 @@ class ModelResolver:
    def get_valid_field_name_and_alias(
        self,
        field_name: str,
-        excludes: Optional[Set[str]] = None,
+        excludes: set[str] | None = None,
        model_type: ModelType = ModelType.PYDANTIC,
-    ) -> Tuple[str, Optional[str]]:
-        return self.field_name_resolvers[model_type].get_valid_field_name_and_alias(
-            field_name, excludes
-        )
+    ) -> tuple[str, str | None]:
+        return self.field_name_resolvers[model_type].get_valid_field_name_and_alias(field_name, excludes)
 
 
 @lru_cache
 def get_singular_name(name: str, suffix: str = SINGULAR_NAME_SUFFIX) -> str:
    singular_name = inflect_engine.singular_noun(name)
    if singular_name is False:
-        singular_name = f'{name}{suffix}'
-    return singular_name  # pyright: ignore
+        singular_name = f"{name}{suffix}"
+    return singular_name  # pyright: ignore[reportReturnType]
 
 
 @lru_cache
-def snake_to_upper_camel(word: str, delimiter: str = '_') -> str:
-    prefix = ''
+def snake_to_upper_camel(word: str, delimiter: str = "_") -> str:
+    prefix = ""
    if word.startswith(delimiter):
-        prefix = '_'
+        prefix = "_"
        word = word[1:]
 
-    return prefix + ''.join(x[0].upper() + x[1:] for x in word.split(delimiter) if x)
+    return prefix + "".join(x[0].upper() + x[1:] for x in word.split(delimiter) if x)
 
 
 def is_url(ref: str) -> bool:
-    return ref.startswith(('https://', 'http://'))
+    return ref.startswith(("https://", "http://"))
 
 
 inflect_engine = inflect.engine()
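Taken together, the changes in this file are almost entirely a typing and formatting modernization rather than a behavioural change: `Optional[...]`/`Union[...]` and the capitalized `typing` generics (`Dict`, `List`, `Set`, `Tuple`, `Type`) give way to PEP 604 unions and builtin generics under `from __future__ import annotations`, typing-only imports move into `if TYPE_CHECKING:` blocks, strings switch to double quotes, wrapped expressions are collapsed onto single lines, and targeted `# noqa` markers silence specific Ruff rules. A minimal sketch of the before/after annotation pattern (illustrative only; `get_alias` is a made-up function, not part of the package):

```python
from __future__ import annotations  # PEP 563: annotations are stored as strings, not evaluated at runtime

from typing import TYPE_CHECKING

if TYPE_CHECKING:  # typing-only imports stay out of the runtime import graph
    from collections.abc import Mapping

# 0.27.2 style (the removed lines in the diff above):
#   def get_alias(name: str, aliases: Optional[Dict[str, str]] = None) -> Tuple[str, Optional[str]]: ...


# 0.28.0 style (the added lines): builtin generics and PEP 604 unions
def get_alias(name: str, aliases: Mapping[str, str] | None = None) -> tuple[str, str | None]:
    lookup = {} if aliases is None else dict(aliases)
    return name, lookup.get(name)
```

Because the annotations are never evaluated at import time, the new spellings remain importable even on Python versions that predate native support for the `X | Y` union syntax in running code.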