datamodel-code-generator 0.11.12__py3-none-any.whl → 0.45.0__py3-none-any.whl

This diff compares two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
Files changed (73)
  1. datamodel_code_generator/__init__.py +654 -185
  2. datamodel_code_generator/__main__.py +872 -388
  3. datamodel_code_generator/arguments.py +798 -0
  4. datamodel_code_generator/cli_options.py +295 -0
  5. datamodel_code_generator/format.py +292 -54
  6. datamodel_code_generator/http.py +85 -10
  7. datamodel_code_generator/imports.py +152 -43
  8. datamodel_code_generator/model/__init__.py +138 -1
  9. datamodel_code_generator/model/base.py +531 -120
  10. datamodel_code_generator/model/dataclass.py +211 -0
  11. datamodel_code_generator/model/enum.py +133 -12
  12. datamodel_code_generator/model/imports.py +22 -0
  13. datamodel_code_generator/model/msgspec.py +462 -0
  14. datamodel_code_generator/model/pydantic/__init__.py +30 -25
  15. datamodel_code_generator/model/pydantic/base_model.py +304 -100
  16. datamodel_code_generator/model/pydantic/custom_root_type.py +11 -2
  17. datamodel_code_generator/model/pydantic/dataclass.py +15 -4
  18. datamodel_code_generator/model/pydantic/imports.py +40 -27
  19. datamodel_code_generator/model/pydantic/types.py +188 -96
  20. datamodel_code_generator/model/pydantic_v2/__init__.py +51 -0
  21. datamodel_code_generator/model/pydantic_v2/base_model.py +268 -0
  22. datamodel_code_generator/model/pydantic_v2/imports.py +15 -0
  23. datamodel_code_generator/model/pydantic_v2/root_model.py +35 -0
  24. datamodel_code_generator/model/pydantic_v2/types.py +143 -0
  25. datamodel_code_generator/model/scalar.py +124 -0
  26. datamodel_code_generator/model/template/Enum.jinja2 +15 -2
  27. datamodel_code_generator/model/template/ScalarTypeAliasAnnotation.jinja2 +6 -0
  28. datamodel_code_generator/model/template/ScalarTypeAliasType.jinja2 +6 -0
  29. datamodel_code_generator/model/template/ScalarTypeStatement.jinja2 +6 -0
  30. datamodel_code_generator/model/template/TypeAliasAnnotation.jinja2 +20 -0
  31. datamodel_code_generator/model/template/TypeAliasType.jinja2 +20 -0
  32. datamodel_code_generator/model/template/TypeStatement.jinja2 +20 -0
  33. datamodel_code_generator/model/template/TypedDict.jinja2 +5 -0
  34. datamodel_code_generator/model/template/TypedDictClass.jinja2 +25 -0
  35. datamodel_code_generator/model/template/TypedDictFunction.jinja2 +24 -0
  36. datamodel_code_generator/model/template/UnionTypeAliasAnnotation.jinja2 +10 -0
  37. datamodel_code_generator/model/template/UnionTypeAliasType.jinja2 +10 -0
  38. datamodel_code_generator/model/template/UnionTypeStatement.jinja2 +10 -0
  39. datamodel_code_generator/model/template/dataclass.jinja2 +50 -0
  40. datamodel_code_generator/model/template/msgspec.jinja2 +55 -0
  41. datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 +17 -4
  42. datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 +12 -4
  43. datamodel_code_generator/model/template/pydantic/Config.jinja2 +1 -1
  44. datamodel_code_generator/model/template/pydantic/dataclass.jinja2 +15 -2
  45. datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2 +57 -0
  46. datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2 +5 -0
  47. datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2 +48 -0
  48. datamodel_code_generator/model/type_alias.py +70 -0
  49. datamodel_code_generator/model/typed_dict.py +161 -0
  50. datamodel_code_generator/model/types.py +106 -0
  51. datamodel_code_generator/model/union.py +105 -0
  52. datamodel_code_generator/parser/__init__.py +30 -12
  53. datamodel_code_generator/parser/_graph.py +67 -0
  54. datamodel_code_generator/parser/_scc.py +171 -0
  55. datamodel_code_generator/parser/base.py +2426 -380
  56. datamodel_code_generator/parser/graphql.py +652 -0
  57. datamodel_code_generator/parser/jsonschema.py +2518 -647
  58. datamodel_code_generator/parser/openapi.py +631 -222
  59. datamodel_code_generator/py.typed +0 -0
  60. datamodel_code_generator/pydantic_patch.py +28 -0
  61. datamodel_code_generator/reference.py +672 -290
  62. datamodel_code_generator/types.py +521 -145
  63. datamodel_code_generator/util.py +155 -0
  64. datamodel_code_generator/watch.py +65 -0
  65. datamodel_code_generator-0.45.0.dist-info/METADATA +301 -0
  66. datamodel_code_generator-0.45.0.dist-info/RECORD +69 -0
  67. {datamodel_code_generator-0.11.12.dist-info → datamodel_code_generator-0.45.0.dist-info}/WHEEL +1 -1
  68. datamodel_code_generator-0.45.0.dist-info/entry_points.txt +2 -0
  69. datamodel_code_generator/version.py +0 -1
  70. datamodel_code_generator-0.11.12.dist-info/METADATA +0 -440
  71. datamodel_code_generator-0.11.12.dist-info/RECORD +0 -31
  72. datamodel_code_generator-0.11.12.dist-info/entry_points.txt +0 -3
  73. {datamodel_code_generator-0.11.12.dist-info → datamodel_code_generator-0.45.0.dist-info/licenses}/LICENSE +0 -0
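The file list above shows the headline additions in 0.45.0: a Pydantic v2 target (model/pydantic_v2/), plus dataclass, TypedDict, msgspec.Struct and type-alias outputs, a GraphQL parser, and a typed distribution (py.typed). As a quick orientation before the per-file diffs, here is a minimal sketch of driving one of the new output targets through the programmatic API; generate(), InputFileType and DataModelType are assumed to be exported by the top-level datamodel_code_generator package (they are not shown in this diff), and the paths are hypothetical.

    # Sketch only: programmatic generation targeting the new Pydantic v2 output.
    from pathlib import Path

    from datamodel_code_generator import DataModelType, InputFileType, generate

    generate(
        Path("api-schema.json"),                 # hypothetical input schema
        input_file_type=InputFileType.JsonSchema,
        output=Path("models.py"),
        output_model_type=DataModelType.PydanticV2BaseModel,
    )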
datamodel_code_generator/model/pydantic/types.py

@@ -1,13 +1,26 @@
+"""Pydantic v1 type manager.
+
+Maps schema types to Pydantic v1 specific types (constr, conint, AnyUrl, etc.).
+"""
+
+from __future__ import annotations
+
 from decimal import Decimal
-from typing import Any, Dict, Optional, Sequence, Set, Type
+from typing import TYPE_CHECKING, Any, ClassVar
 
-from datamodel_code_generator.format import PythonVersion
+from datamodel_code_generator.format import DatetimeClassType, PythonVersion, PythonVersionMin
 from datamodel_code_generator.imports import (
     IMPORT_ANY,
     IMPORT_DATE,
     IMPORT_DATETIME,
     IMPORT_DECIMAL,
+    IMPORT_PATH,
+    IMPORT_PENDULUM_DATE,
+    IMPORT_PENDULUM_DATETIME,
+    IMPORT_PENDULUM_DURATION,
+    IMPORT_PENDULUM_TIME,
     IMPORT_TIME,
+    IMPORT_TIMEDELTA,
     IMPORT_UUID,
 )
 from datamodel_code_generator.model.pydantic.imports import (
@@ -19,9 +32,15 @@ from datamodel_code_generator.model.pydantic.imports import (
     IMPORT_CONSTR,
     IMPORT_EMAIL_STR,
     IMPORT_IPV4ADDRESS,
+    IMPORT_IPV4NETWORKS,
     IMPORT_IPV6ADDRESS,
+    IMPORT_IPV6NETWORKS,
     IMPORT_NEGATIVE_FLOAT,
     IMPORT_NEGATIVE_INT,
+    IMPORT_NON_NEGATIVE_FLOAT,
+    IMPORT_NON_NEGATIVE_INT,
+    IMPORT_NON_POSITIVE_FLOAT,
+    IMPORT_NON_POSITIVE_INT,
     IMPORT_POSITIVE_FLOAT,
     IMPORT_POSITIVE_INT,
     IMPORT_SECRET_STR,
@@ -36,19 +55,24 @@ from datamodel_code_generator.model.pydantic.imports import (
     IMPORT_UUID4,
     IMPORT_UUID5,
 )
-from datamodel_code_generator.types import DataType
+from datamodel_code_generator.types import DataType, StrictTypes, Types, UnionIntFloat
 from datamodel_code_generator.types import DataTypeManager as _DataTypeManager
-from datamodel_code_generator.types import StrictTypes, Types
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
 
 
 def type_map_factory(
-    data_type: Type[DataType],
+    data_type: type[DataType],
     strict_types: Sequence[StrictTypes],
-) -> Dict[Types, DataType]:
-    data_type_int = data_type(type='int')
-    data_type_float = data_type(type='float')
-    data_type_str = data_type(type='str')
-    return {
+    pattern_key: str,
+    use_pendulum: bool,  # noqa: FBT001
+) -> dict[Types, DataType]:
+    """Create a mapping of schema types to Pydantic v1 data types."""
+    data_type_int = data_type(type="int")
+    data_type_float = data_type(type="float")
+    data_type_str = data_type(type="str")
+    result = {
         Types.integer: data_type_int,
         Types.int32: data_type_int,
         Types.int64: data_type_int,
@@ -59,9 +83,11 @@ def type_map_factory(
         Types.time: data_type.from_import(IMPORT_TIME),
         Types.string: data_type_str,
         Types.byte: data_type_str,  # base64 encoded string
-        Types.binary: data_type(type='bytes'),
+        Types.binary: data_type(type="bytes"),
         Types.date: data_type.from_import(IMPORT_DATE),
         Types.date_time: data_type.from_import(IMPORT_DATETIME),
+        Types.timedelta: data_type.from_import(IMPORT_TIMEDELTA),
+        Types.path: data_type.from_import(IMPORT_PATH),
         Types.password: data_type.from_import(IMPORT_SECRET_STR),
         Types.email: data_type.from_import(IMPORT_EMAIL_STR),
         Types.uuid: data_type.from_import(IMPORT_UUID),
@@ -76,21 +102,32 @@
             strict=StrictTypes.str in strict_types,
             # https://github.com/horejsek/python-fastjsonschema/blob/61c6997a8348b8df9b22e029ca2ba35ef441fbb8/fastjsonschema/draft04.py#L31
             kwargs={
-                'regex': r"r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])\Z'",
-                **({'strict': True} if StrictTypes.str in strict_types else {}),
+                pattern_key: r"r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*"
+                r"([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])\Z'",
+                **({"strict": True} if StrictTypes.str in strict_types else {}),
             },
         ),
         Types.ipv4: data_type.from_import(IMPORT_IPV4ADDRESS),
         Types.ipv6: data_type.from_import(IMPORT_IPV6ADDRESS),
-        Types.boolean: data_type(type='bool'),
+        Types.ipv4_network: data_type.from_import(IMPORT_IPV4NETWORKS),
+        Types.ipv6_network: data_type.from_import(IMPORT_IPV6NETWORKS),
+        Types.boolean: data_type(type="bool"),
         Types.object: data_type.from_import(IMPORT_ANY, is_dict=True),
-        Types.null: data_type.from_import(IMPORT_ANY, is_optional=True),
+        Types.null: data_type(type="None"),
         Types.array: data_type.from_import(IMPORT_ANY, is_list=True),
         Types.any: data_type.from_import(IMPORT_ANY),
     }
+    if use_pendulum:
+        result[Types.date] = data_type.from_import(IMPORT_PENDULUM_DATE)
+        result[Types.date_time] = data_type.from_import(IMPORT_PENDULUM_DATETIME)
+        result[Types.time] = data_type.from_import(IMPORT_PENDULUM_TIME)
+        result[Types.timedelta] = data_type.from_import(IMPORT_PENDULUM_DURATION)
+
+    return result
 
 
-def strict_type_map_factory(data_type: Type[DataType]) -> Dict[StrictTypes, DataType]:
+def strict_type_map_factory(data_type: type[DataType]) -> dict[StrictTypes, DataType]:
+    """Create a mapping of strict types to Pydantic v1 strict data types."""
     return {
         StrictTypes.int: data_type.from_import(IMPORT_STRICT_INT, strict=True),
         StrictTypes.float: data_type.from_import(IMPORT_STRICT_FLOAT, strict=True),
@@ -100,162 +137,217 @@ def strict_type_map_factory(data_type: Type[DataType]) -> Dict[StrictTypes, DataType]:
     }
 
 
-kwargs_schema_to_model: Dict[str, str] = {
-    'exclusiveMinimum': 'gt',
-    'minimum': 'ge',
-    'exclusiveMaximum': 'lt',
-    'maximum': 'le',
-    'multipleOf': 'multiple_of',
-    'minItems': 'min_items',
-    'maxItems': 'max_items',
-    'minLength': 'min_length',
-    'maxLength': 'max_length',
-    'pattern': 'regex',
+number_kwargs: set[str] = {
+    "exclusiveMinimum",
+    "minimum",
+    "exclusiveMaximum",
+    "maximum",
+    "multipleOf",
 }
 
-number_kwargs: Set[str] = {
-    'exclusiveMinimum',
-    'minimum',
-    'exclusiveMaximum',
-    'maximum',
-    'multipleOf',
-}
+string_kwargs: set[str] = {"minItems", "maxItems", "minLength", "maxLength", "pattern"}
 
-string_kwargs: Set[str] = {'minItems', 'maxItems', 'minLength', 'maxLength', 'pattern'}
+bytes_kwargs: set[str] = {"minLength", "maxLength"}
 
-byes_kwargs: Set[str] = {'minLength', 'maxLength'}
-
-escape_characters = str.maketrans(
-    {
-        "'": r"\'",
-        '\b': r'\b',
-        '\f': r'\f',
-        '\n': r'\n',
-        '\r': r'\r',
-        '\t': r'\t',
-    }
-)
+escape_characters = str.maketrans({
+    "'": r"\'",
+    "\b": r"\b",
+    "\f": r"\f",
+    "\n": r"\n",
+    "\r": r"\r",
+    "\t": r"\t",
+})
 
 
-def transform_kwargs(kwargs: Dict[str, Any], filter_: Set[str]) -> Dict[str, str]:
-    return {
-        kwargs_schema_to_model.get(k, k): v
-        for (k, v) in kwargs.items()
-        if v is not None and k in filter_
-    }
+class DataTypeManager(_DataTypeManager):
+    """Manage data type mappings for Pydantic v1 models."""
 
+    PATTERN_KEY: ClassVar[str] = "regex"
 
-class DataTypeManager(_DataTypeManager):
-    def __init__(
+    def __init__(  # noqa: PLR0913, PLR0917
         self,
-        python_version: PythonVersion = PythonVersion.PY_37,
-        use_standard_collections: bool = False,
-        use_generic_container_types: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-    ):
+        python_version: PythonVersion = PythonVersionMin,
+        use_standard_collections: bool = False,  # noqa: FBT001, FBT002
+        use_generic_container_types: bool = False,  # noqa: FBT001, FBT002
+        strict_types: Sequence[StrictTypes] | None = None,
+        use_non_positive_negative_number_constrained_types: bool = False,  # noqa: FBT001, FBT002
+        use_decimal_for_multiple_of: bool = False,  # noqa: FBT001, FBT002
+        use_union_operator: bool = False,  # noqa: FBT001, FBT002
+        use_pendulum: bool = False,  # noqa: FBT001, FBT002
+        target_datetime_class: DatetimeClassType | None = None,
+        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
+        use_serialize_as_any: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
+        """Initialize the DataTypeManager with Pydantic v1 type mappings."""
         super().__init__(
             python_version,
             use_standard_collections,
            use_generic_container_types,
             strict_types,
+            use_non_positive_negative_number_constrained_types,
+            use_decimal_for_multiple_of,
+            use_union_operator,
+            use_pendulum,
+            target_datetime_class,
+            treat_dot_as_module,
+            use_serialize_as_any,
         )
 
-        self.type_map: Dict[Types, DataType] = type_map_factory(
+        self.type_map: dict[Types, DataType] = self.type_map_factory(
            self.data_type,
             strict_types=self.strict_types,
+            pattern_key=self.PATTERN_KEY,
+            target_datetime_class=self.target_datetime_class,
         )
-        self.strict_type_map: Dict[StrictTypes, DataType] = strict_type_map_factory(
+        self.strict_type_map: dict[StrictTypes, DataType] = strict_type_map_factory(
            self.data_type,
         )
 
-    def get_data_int_type(self, types: Types, **kwargs: Any) -> DataType:
-        data_type_kwargs: Dict[str, Any] = transform_kwargs(kwargs, number_kwargs)
+        self.kwargs_schema_to_model: dict[str, str] = {
+            "exclusiveMinimum": "gt",
+            "minimum": "ge",
+            "exclusiveMaximum": "lt",
+            "maximum": "le",
+            "multipleOf": "multiple_of",
+            "minItems": "min_items",
+            "maxItems": "max_items",
+            "minLength": "min_length",
+            "maxLength": "max_length",
+            "pattern": self.PATTERN_KEY,
+        }
+
+    def type_map_factory(
+        self,
+        data_type: type[DataType],
+        strict_types: Sequence[StrictTypes],
+        pattern_key: str,
+        target_datetime_class: DatetimeClassType | None,  # noqa: ARG002
+    ) -> dict[Types, DataType]:
+        """Create type mapping with Pydantic v1 specific types."""
+        return type_map_factory(
+            data_type,
+            strict_types,
+            pattern_key,
+            self.use_pendulum,
+        )
+
+    def transform_kwargs(self, kwargs: dict[str, Any], filter_: set[str]) -> dict[str, str]:
+        """Transform schema kwargs to Pydantic v1 field kwargs."""
+        return {self.kwargs_schema_to_model.get(k, k): v for (k, v) in kwargs.items() if v is not None and k in filter_}
+
+    def get_data_int_type(  # noqa: PLR0911
+        self,
+        types: Types,
+        **kwargs: Any,
+    ) -> DataType:
+        """Get int data type with constraints (conint, PositiveInt, etc.)."""
+        data_type_kwargs: dict[str, Any] = self.transform_kwargs(kwargs, number_kwargs)
         strict = StrictTypes.int in self.strict_types
         if data_type_kwargs:
             if not strict:
-                if data_type_kwargs == {'gt': 0}:
+                if data_type_kwargs == {"gt": 0}:
                     return self.data_type.from_import(IMPORT_POSITIVE_INT)
-                if data_type_kwargs == {'lt': 0}:
+                if data_type_kwargs == {"lt": 0}:
                     return self.data_type.from_import(IMPORT_NEGATIVE_INT)
+                if data_type_kwargs == {"ge": 0} and self.use_non_positive_negative_number_constrained_types:
+                    return self.data_type.from_import(IMPORT_NON_NEGATIVE_INT)
+                if data_type_kwargs == {"le": 0} and self.use_non_positive_negative_number_constrained_types:
+                    return self.data_type.from_import(IMPORT_NON_POSITIVE_INT)
             kwargs = {k: int(v) for k, v in data_type_kwargs.items()}
             if strict:
-                kwargs['strict'] = True
+                kwargs["strict"] = True
             return self.data_type.from_import(IMPORT_CONINT, kwargs=kwargs)
         if strict:
             return self.strict_type_map[StrictTypes.int]
         return self.type_map[types]
 
-    def get_data_float_type(self, types: Types, **kwargs: Any) -> DataType:
-        data_type_kwargs = transform_kwargs(kwargs, number_kwargs)
+    def get_data_float_type(  # noqa: PLR0911
+        self,
+        types: Types,
+        **kwargs: Any,
+    ) -> DataType:
+        """Get float data type with constraints (confloat, PositiveFloat, etc.)."""
+        data_type_kwargs = self.transform_kwargs(kwargs, number_kwargs)
         strict = StrictTypes.float in self.strict_types
         if data_type_kwargs:
+            # Use Decimal instead of float when multipleOf is present to avoid floating-point precision issues
+            if self.use_decimal_for_multiple_of and "multiple_of" in data_type_kwargs:
+                return self.data_type.from_import(
+                    IMPORT_CONDECIMAL,
+                    kwargs={k: Decimal(str(v)) for k, v in data_type_kwargs.items()},
+                )
             if not strict:
-                if data_type_kwargs == {'gt': 0}:
+                if data_type_kwargs == {"gt": 0}:
                     return self.data_type.from_import(IMPORT_POSITIVE_FLOAT)
-                if data_type_kwargs == {'lt': 0}:
+                if data_type_kwargs == {"lt": 0}:
                     return self.data_type.from_import(IMPORT_NEGATIVE_FLOAT)
+                if data_type_kwargs == {"ge": 0} and self.use_non_positive_negative_number_constrained_types:
+                    return self.data_type.from_import(IMPORT_NON_NEGATIVE_FLOAT)
+                if data_type_kwargs == {"le": 0} and self.use_non_positive_negative_number_constrained_types:
+                    return self.data_type.from_import(IMPORT_NON_POSITIVE_FLOAT)
             kwargs = {k: float(v) for k, v in data_type_kwargs.items()}
             if strict:
-                kwargs['strict'] = True
+                kwargs["strict"] = True
             return self.data_type.from_import(IMPORT_CONFLOAT, kwargs=kwargs)
         if strict:
             return self.strict_type_map[StrictTypes.float]
         return self.type_map[types]
 
     def get_data_decimal_type(self, types: Types, **kwargs: Any) -> DataType:
-        data_type_kwargs = transform_kwargs(kwargs, number_kwargs)
+        """Get decimal data type with constraints (condecimal)."""
+        data_type_kwargs = self.transform_kwargs(kwargs, number_kwargs)
         if data_type_kwargs:
             return self.data_type.from_import(
                 IMPORT_CONDECIMAL,
-                kwargs={k: Decimal(v) for k, v in data_type_kwargs.items()},
+                kwargs={k: Decimal(str(v) if isinstance(v, UnionIntFloat) else v) for k, v in data_type_kwargs.items()},
             )
         return self.type_map[types]
 
     def get_data_str_type(self, types: Types, **kwargs: Any) -> DataType:
-        data_type_kwargs: Dict[str, Any] = transform_kwargs(kwargs, string_kwargs)
+        """Get string data type with constraints (constr)."""
+        data_type_kwargs: dict[str, Any] = self.transform_kwargs(kwargs, string_kwargs)
        strict = StrictTypes.str in self.strict_types
         if data_type_kwargs:
             if strict:
-                data_type_kwargs['strict'] = True
-            if 'regex' in data_type_kwargs:
-                escaped_regex = data_type_kwargs['regex'].translate(escape_characters)
+                data_type_kwargs["strict"] = True
+            if self.PATTERN_KEY in data_type_kwargs:
+                escaped_regex = data_type_kwargs[self.PATTERN_KEY].translate(escape_characters)
                 # TODO: remove unneeded escaped characters
-                data_type_kwargs['regex'] = f"r'{escaped_regex}'"
+                data_type_kwargs[self.PATTERN_KEY] = f"r'{escaped_regex}'"
             return self.data_type.from_import(IMPORT_CONSTR, kwargs=data_type_kwargs)
         if strict:
             return self.strict_type_map[StrictTypes.str]
         return self.type_map[types]
 
     def get_data_bytes_type(self, types: Types, **kwargs: Any) -> DataType:
-        data_type_kwargs: Dict[str, Any] = transform_kwargs(kwargs, byes_kwargs)
+        """Get bytes data type with constraints (conbytes)."""
+        data_type_kwargs: dict[str, Any] = self.transform_kwargs(kwargs, bytes_kwargs)
         strict = StrictTypes.bytes in self.strict_types
-        if data_type_kwargs:
-            if not strict:
-                return self.data_type.from_import(
-                    IMPORT_CONBYTES, kwargs=data_type_kwargs
-                )
+        if data_type_kwargs and not strict:
+            return self.data_type.from_import(IMPORT_CONBYTES, kwargs=data_type_kwargs)
         # conbytes doesn't accept strict argument
         # https://github.com/samuelcolvin/pydantic/issues/2489
-        # if strict:
-        #     data_type_kwargs['strict'] = True
-        #     return self.data_type.from_import(IMPORT_CONBYTES, kwargs=data_type_kwargs)
         if strict:
             return self.strict_type_map[StrictTypes.bytes]
         return self.type_map[types]
 
-    def get_data_type(self, types: Types, **kwargs: Any) -> DataType:
+    def get_data_type(  # noqa: PLR0911
+        self,
+        types: Types,
+        **kwargs: Any,
+    ) -> DataType:
+        """Get data type with appropriate constraints for the given type."""
         if types == Types.string:
             return self.get_data_str_type(types, **kwargs)
-        elif types in (Types.int32, Types.int64, Types.integer):
+        if types in {Types.int32, Types.int64, Types.integer}:
             return self.get_data_int_type(types, **kwargs)
-        elif types in (Types.float, Types.double, Types.number, Types.time):
+        if types in {Types.float, Types.double, Types.number, Types.time}:
             return self.get_data_float_type(types, **kwargs)
-        elif types == Types.decimal:
+        if types == Types.decimal:
             return self.get_data_decimal_type(types, **kwargs)
-        elif types == Types.binary:
+        if types == Types.binary:
             return self.get_data_bytes_type(types, **kwargs)
-        elif types == Types.boolean:
-            if StrictTypes.bool in self.strict_types:
-                return self.strict_type_map[StrictTypes.bool]
+        if types == Types.boolean and StrictTypes.bool in self.strict_types:
+            return self.strict_type_map[StrictTypes.bool]
 
         return self.type_map[types]
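The hunks above rework how constrained scalar types are resolved: the module-level transform_kwargs helper and kwargs_schema_to_model table move onto DataTypeManager, the pattern keyword becomes the PATTERN_KEY class variable, and new options (use_pendulum, use_decimal_for_multiple_of, non-positive/non-negative constrained types) feed the mapping. A small sketch of the resulting behaviour, inferred from the diff rather than taken from the package's tests:

    # Sketch inferred from the diff above; constructor defaults live in the
    # base DataTypeManager and are not shown here.
    from datamodel_code_generator.model.pydantic.types import DataTypeManager
    from datamodel_code_generator.types import Types

    manager = DataTypeManager()

    # exclusiveMinimum=0 is translated to {"gt": 0} and collapses to PositiveInt
    positive_int = manager.get_data_type(Types.integer, exclusiveMinimum=0)

    # "null" now maps to a plain None type instead of Optional[Any]
    null_type = manager.get_data_type(Types.null)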
datamodel_code_generator/model/pydantic_v2/__init__.py (new file)

@@ -0,0 +1,51 @@
+"""Pydantic v2 model generator.
+
+Provides BaseModel, RootModel, and DataModelField for generating
+Pydantic v2 compatible data models with ConfigDict support.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Optional
+
+from pydantic import BaseModel as _BaseModel
+
+from .base_model import BaseModel, DataModelField, UnionMode
+from .root_model import RootModel
+from .types import DataTypeManager
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+
+
+def dump_resolve_reference_action(class_names: Iterable[str]) -> str:
+    """Generate model_rebuild() calls for Pydantic v2 models."""
+    return "\n".join(f"{class_name}.model_rebuild()" for class_name in class_names)
+
+
+class ConfigDict(_BaseModel):
+    """Pydantic v2 model_config options."""
+
+    extra: Optional[str] = None  # noqa: UP045
+    title: Optional[str] = None  # noqa: UP045
+    populate_by_name: Optional[bool] = None  # noqa: UP045
+    allow_extra_fields: Optional[bool] = None  # noqa: UP045
+    extra_fields: Optional[str] = None  # noqa: UP045
+    from_attributes: Optional[bool] = None  # noqa: UP045
+    frozen: Optional[bool] = None  # noqa: UP045
+    arbitrary_types_allowed: Optional[bool] = None  # noqa: UP045
+    protected_namespaces: Optional[tuple[str, ...]] = None  # noqa: UP045
+    regex_engine: Optional[str] = None  # noqa: UP045
+    use_enum_values: Optional[bool] = None  # noqa: UP045
+    coerce_numbers_to_str: Optional[bool] = None  # noqa: UP045
+    use_attribute_docstrings: Optional[bool] = None  # noqa: UP045
+
+
+__all__ = [
+    "BaseModel",
+    "DataModelField",
+    "DataTypeManager",
+    "RootModel",
+    "UnionMode",
+    "dump_resolve_reference_action",
+]
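For contrast with the Pydantic v1 module above, the new pydantic_v2 package resolves forward references through model_rebuild() and models the emitted model_config keys with the ConfigDict helper. A hedged usage sketch (ConfigDict is defined in the file shown above but not listed in __all__, so the import is illustrative):

    # Sketch based on the new-file diff above.
    from datamodel_code_generator.model.pydantic_v2 import ConfigDict, dump_resolve_reference_action

    print(dump_resolve_reference_action(["User", "Order"]))
    # User.model_rebuild()
    # Order.model_rebuild()

    # ConfigDict mirrors the model_config keys the generator can emit; unset keys stay None.
    config = ConfigDict(populate_by_name=True, extra="forbid")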