dhi 1.1.1__cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dhi/__init__.py +173 -0
- dhi/_dhi_native.cpython-313-aarch64-linux-gnu.so +0 -0
- dhi/_native.c +379 -0
- dhi/batch.py +236 -0
- dhi/constraints.py +358 -0
- dhi/datetime_types.py +108 -0
- dhi/fields.py +187 -0
- dhi/functional_validators.py +108 -0
- dhi/libsatya.so +0 -0
- dhi/model.py +658 -0
- dhi/networks.py +290 -0
- dhi/secret.py +105 -0
- dhi/special_types.py +359 -0
- dhi/types.py +345 -0
- dhi/validator.py +212 -0
- dhi-1.1.1.dist-info/METADATA +115 -0
- dhi-1.1.1.dist-info/RECORD +21 -0
- dhi-1.1.1.dist-info/WHEEL +6 -0
- dhi-1.1.1.dist-info/licenses/LICENSE +21 -0
- dhi-1.1.1.dist-info/top_level.txt +1 -0
- dhi.libs/libsatya-a22d98f4.so +0 -0
dhi/special_types.py
ADDED
|
@@ -0,0 +1,359 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Special types for dhi - Pydantic v2 compatible.
|
|
3
|
+
|
|
4
|
+
Provides UUID, Path, Base64, Json, ByteSize, and other specialized
|
|
5
|
+
validation types matching Pydantic's type system.
|
|
6
|
+
|
|
7
|
+
Example:
|
|
8
|
+
from dhi import BaseModel, UUID4, FilePath, Json, Base64Str
|
|
9
|
+
|
|
10
|
+
class Document(BaseModel):
|
|
11
|
+
id: UUID4
|
|
12
|
+
path: FilePath
|
|
13
|
+
metadata: Json
|
|
14
|
+
encoded: Base64Str
|
|
15
|
+
"""
|
|
16
|
+
|
|
17
|
+
import uuid
|
|
18
|
+
import base64
|
|
19
|
+
import json as json_module
|
|
20
|
+
from pathlib import Path
|
|
21
|
+
from typing import Annotated, Any
|
|
22
|
+
|
|
23
|
+
from .validator import ValidationError
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
# ============================================================
|
|
27
|
+
# UUID Validators
|
|
28
|
+
# ============================================================
|
|
29
|
+
|
|
30
|
+
class _UUIDVersionValidator:
    """Validates that a value is a UUID of one specific version."""

    __slots__ = ('version',)

    def __init__(self, version: int):
        # The RFC 4122 version number (1, 3, 4 or 5) this validator enforces.
        self.version = version

    def __repr__(self) -> str:
        return f"UUIDVersionValidator(version={self.version})"

    def validate(self, value: Any, field_name: str = "value") -> uuid.UUID:
        """Coerce *value* (str or uuid.UUID) to uuid.UUID and check its version."""
        parsed = value
        if isinstance(parsed, str):
            try:
                parsed = uuid.UUID(parsed)
            except ValueError:
                raise ValidationError(field_name, f"Invalid UUID string: {parsed!r}")
        if not isinstance(parsed, uuid.UUID):
            raise ValidationError(field_name, f"Expected UUID, got {type(parsed).__name__}")
        if parsed.version != self.version:
            raise ValidationError(
                field_name,
                f"Expected UUID version {self.version}, got version {parsed.version}"
            )
        return parsed
|
55
|
+
|
|
56
|
+
|
|
57
|
+
class _UUIDValidator:
    """Validates any UUID (no version constraint)."""

    def __repr__(self) -> str:
        return "UUIDValidator()"

    def validate(self, value: Any, field_name: str = "value") -> uuid.UUID:
        """Return *value* as a uuid.UUID, parsing strings on the way in."""
        result = value
        if isinstance(result, str):
            try:
                result = uuid.UUID(result)
            except ValueError:
                raise ValidationError(field_name, f"Invalid UUID string: {result!r}")
        if isinstance(result, uuid.UUID):
            return result
        raise ValidationError(field_name, f"Expected UUID, got {type(result).__name__}")
|
72
|
+
|
|
73
|
+
|
|
74
|
+
# ============================================================
|
|
75
|
+
# Path Validators
|
|
76
|
+
# ============================================================
|
|
77
|
+
|
|
78
|
+
class _FilePathValidator:
    """Validates that a path points to an existing file."""

    def __repr__(self) -> str:
        return "FilePathValidator()"

    def validate(self, value: Any, field_name: str = "value") -> Path:
        """Coerce *value* to Path and require an existing regular file."""
        path = Path(value) if isinstance(value, str) else value
        if not isinstance(path, Path):
            raise ValidationError(field_name, f"Expected path, got {type(path).__name__}")
        if not path.exists():
            raise ValidationError(field_name, f"Path does not exist: {path}")
        if not path.is_file():
            raise ValidationError(field_name, f"Path is not a file: {path}")
        return path
|
94
|
+
|
|
95
|
+
|
|
96
|
+
class _DirectoryPathValidator:
    """Validates that a path points to an existing directory."""

    def __repr__(self) -> str:
        return "DirectoryPathValidator()"

    def validate(self, value: Any, field_name: str = "value") -> Path:
        """Coerce *value* to Path and require an existing directory."""
        path = Path(value) if isinstance(value, str) else value
        if not isinstance(path, Path):
            raise ValidationError(field_name, f"Expected path, got {type(path).__name__}")
        if not path.exists():
            raise ValidationError(field_name, f"Path does not exist: {path}")
        if not path.is_dir():
            raise ValidationError(field_name, f"Path is not a directory: {path}")
        return path
|
112
|
+
|
|
113
|
+
|
|
114
|
+
class _NewPathValidator:
    """Validates that a path does NOT already exist (for new file/dir creation)."""

    def __repr__(self) -> str:
        return "NewPathValidator()"

    def validate(self, value: Any, field_name: str = "value") -> Path:
        """Coerce *value* to Path; require it to be absent but creatable."""
        target = Path(value) if isinstance(value, str) else value
        if not isinstance(target, Path):
            raise ValidationError(field_name, f"Expected path, got {type(target).__name__}")
        if target.exists():
            raise ValidationError(field_name, f"Path already exists: {target}")
        # Creatable means the parent directory is already there.
        if not target.parent.exists():
            raise ValidationError(field_name, f"Parent directory does not exist: {target.parent}")
        return target
|
131
|
+
|
|
132
|
+
|
|
133
|
+
# ============================================================
|
|
134
|
+
# Base64 Validators
|
|
135
|
+
# ============================================================
|
|
136
|
+
|
|
137
|
+
class _Base64BytesValidator:
    """Validates and decodes base64-encoded bytes."""

    def __repr__(self) -> str:
        return "Base64BytesValidator()"

    def validate(self, value: Any, field_name: str = "value") -> bytes:
        """Strictly decode base64 from str or bytes input; return raw bytes."""
        if not isinstance(value, (bytes, str)):
            raise ValidationError(field_name, f"Expected str or bytes, got {type(value).__name__}")
        raw = value.encode('ascii') if isinstance(value, str) else value
        try:
            return base64.b64decode(raw, validate=True)
        except Exception:
            raise ValidationError(field_name, "Invalid base64 encoding")
|
154
|
+
|
|
155
|
+
|
|
156
|
+
class _Base64StrValidator:
    """Validates and decodes base64-encoded string."""

    def __repr__(self) -> str:
        return "Base64StrValidator()"

    def validate(self, value: Any, field_name: str = "value") -> str:
        """Strictly decode base64 text and return the payload as UTF-8 str."""
        if not isinstance(value, str):
            raise ValidationError(field_name, f"Expected str, got {type(value).__name__}")
        try:
            # Non-ASCII input, bad base64, and non-UTF-8 payloads all land here.
            raw = base64.b64decode(value.encode('ascii'), validate=True)
            return raw.decode('utf-8')
        except Exception:
            raise ValidationError(field_name, "Invalid base64 encoding")
|
170
|
+
|
|
171
|
+
|
|
172
|
+
class _Base64UrlBytesValidator:
    """Validates and decodes URL-safe base64-encoded bytes."""

    def __repr__(self) -> str:
        return "Base64UrlBytesValidator()"

    def validate(self, value: Any, field_name: str = "value") -> bytes:
        """Decode URL-safe base64 from str or bytes, tolerating missing padding.

        Raises ValidationError if the value is neither str nor bytes, or if it
        is not valid URL-safe base64.
        """
        if isinstance(value, bytes):
            data = value
        elif isinstance(value, str):
            data = value.encode('ascii')
        else:
            raise ValidationError(field_name, f"Expected str or bytes, got {type(value).__name__}")
        try:
            # Restore padding commonly stripped by URL-safe encoders.
            # (data is always bytes here; the original code carried a dead
            # str-branch ternary for the padding append.)
            padding = -len(data) % 4
            if padding:
                data += b'=' * padding
            return base64.urlsafe_b64decode(data)
        except Exception:
            raise ValidationError(field_name, "Invalid URL-safe base64 encoding")
|
193
|
+
|
|
194
|
+
|
|
195
|
+
class _Base64UrlStrValidator:
    """Validates and decodes URL-safe base64-encoded string."""

    def __repr__(self) -> str:
        return "Base64UrlStrValidator()"

    def validate(self, value: Any, field_name: str = "value") -> str:
        """Decode URL-safe base64 text (padding optional) to a UTF-8 str."""
        if not isinstance(value, str):
            raise ValidationError(field_name, f"Expected str, got {type(value).__name__}")
        try:
            # Re-pad to a multiple of four before decoding.
            padded = value + '=' * (-len(value) % 4)
            decoded = base64.urlsafe_b64decode(padded.encode('ascii'))
            return decoded.decode('utf-8')
        except Exception:
            raise ValidationError(field_name, "Invalid URL-safe base64 encoding")
|
213
|
+
|
|
214
|
+
|
|
215
|
+
# ============================================================
|
|
216
|
+
# Json Validator
|
|
217
|
+
# ============================================================
|
|
218
|
+
|
|
219
|
+
class _JsonValidator:
    """Validates that a string is valid JSON and parses it."""

    def __repr__(self) -> str:
        return "JsonValidator()"

    def validate(self, value: Any, field_name: str = "value") -> Any:
        """Parse *value* as a JSON document and return the parsed object."""
        if not isinstance(value, str):
            raise ValidationError(field_name, f"Expected str, got {type(value).__name__}")
        try:
            parsed = json_module.loads(value)
        except json_module.JSONDecodeError as e:
            raise ValidationError(field_name, f"Invalid JSON: {e}")
        return parsed
|
232
|
+
|
|
233
|
+
|
|
234
|
+
# ============================================================
|
|
235
|
+
# ImportString Validator
|
|
236
|
+
# ============================================================
|
|
237
|
+
|
|
238
|
+
class _ImportStringValidator:
    """Validates that a string is a valid Python import path and imports it."""

    def __repr__(self) -> str:
        return "ImportStringValidator()"

    def validate(self, value: Any, field_name: str = "value") -> Any:
        """Import and return the object named by a dotted path.

        'package.module' imports a module; 'package.module.attr' imports the
        module and then resolves the attribute. Raises ValidationError when
        the import or attribute lookup fails.
        """
        if not isinstance(value, str):
            raise ValidationError(field_name, f"Expected str, got {type(value).__name__}")
        # Hoisted out of the branches: the original imported importlib twice.
        import importlib
        try:
            module_path, _, attr_name = value.rpartition('.')
            if not module_path:
                # Bare name with no dot: treat as a plain module import.
                return importlib.import_module(value)
            module = importlib.import_module(module_path)
            return getattr(module, attr_name)
        except (ImportError, AttributeError) as e:
            raise ValidationError(field_name, f"Cannot import '{value}': {e}")
|
259
|
+
|
|
260
|
+
|
|
261
|
+
# ============================================================
|
|
262
|
+
# ByteSize
|
|
263
|
+
# ============================================================
|
|
264
|
+
|
|
265
|
+
class ByteSize(int):
    """Represents a size in bytes with human-readable parsing.

    Matches Pydantic's ByteSize type.

    Supports parsing strings like '1kb', '2.5 MB', '1GiB'.
    """

    # Decimal (kb/mb/...) and binary (kib/mib/...) multipliers, lowercase keys.
    _UNITS = {
        'b': 1,
        'kb': 1000, 'kib': 1024,
        'mb': 1000**2, 'mib': 1024**2,
        'gb': 1000**3, 'gib': 1024**3,
        'tb': 1000**4, 'tib': 1024**4,
        'pb': 1000**5, 'pib': 1024**5,
    }

    def __new__(cls, value: Any) -> 'ByteSize':
        """Build from an int/float byte count or a size string like '2.5 MB'.

        Raises ValidationError for unsupported types or unparseable strings.
        """
        if isinstance(value, (int, float)):
            return super().__new__(cls, int(value))
        if isinstance(value, str):
            return cls._parse_str(value)
        raise ValidationError("value", f"Expected int or size string, got {type(value).__name__}")

    @classmethod
    def _parse_str(cls, value: str) -> 'ByteSize':
        """Parse a case-insensitive size string; bare numbers mean bytes."""
        import re
        value = value.strip().lower()
        # Anchored at both ends: without '$' trailing garbage such as
        # '5kb extra' was silently accepted as 5kb.
        match = re.match(r'^(\d+(?:\.\d+)?)\s*([a-z]*)$', value)
        if not match:
            raise ValidationError("value", f"Cannot parse byte size: {value!r}")
        number = float(match.group(1))
        unit = match.group(2) or 'b'
        if unit not in cls._UNITS:
            raise ValidationError("value", f"Unknown byte size unit: {unit!r}")
        return super().__new__(cls, int(number * cls._UNITS[unit]))

    def human_readable(self, decimal: bool = False) -> str:
        """Convert to human-readable string.

        decimal=True uses powers of 1000 (KB, MB, ...); the default uses
        powers of 1024 (KiB, MiB, ...). Whole multiples drop the fraction.
        """
        if decimal:
            units = [('PB', 1000**5), ('TB', 1000**4), ('GB', 1000**3),
                     ('MB', 1000**2), ('KB', 1000), ('B', 1)]
        else:
            units = [('PiB', 1024**5), ('TiB', 1024**4), ('GiB', 1024**3),
                     ('MiB', 1024**2), ('KiB', 1024), ('B', 1)]

        # Largest unit whose magnitude fits wins; 0 falls through to '0B'.
        for suffix, divisor in units:
            if abs(self) >= divisor:
                value = self / divisor
                if value == int(value):
                    return f"{int(value)}{suffix}"
                return f"{value:.1f}{suffix}"
        return f"{int(self)}B"
|
318
|
+
|
|
319
|
+
|
|
320
|
+
# ============================================================
|
|
321
|
+
# Public Type Aliases
|
|
322
|
+
# ============================================================
|
|
323
|
+
|
|
324
|
+
# UUID types
UUID1 = Annotated[uuid.UUID, _UUIDVersionValidator(1)]
UUID3 = Annotated[uuid.UUID, _UUIDVersionValidator(3)]
UUID4 = Annotated[uuid.UUID, _UUIDVersionValidator(4)]
UUID5 = Annotated[uuid.UUID, _UUIDVersionValidator(5)]

# Path types
FilePath = Annotated[Path, _FilePathValidator()]
DirectoryPath = Annotated[Path, _DirectoryPathValidator()]
NewPath = Annotated[Path, _NewPathValidator()]

# Base64 types
Base64Bytes = Annotated[bytes, _Base64BytesValidator()]
Base64Str = Annotated[str, _Base64StrValidator()]
Base64UrlBytes = Annotated[bytes, _Base64UrlBytesValidator()]
Base64UrlStr = Annotated[str, _Base64UrlStrValidator()]

# Json type
Json = Annotated[str, _JsonValidator()]

# ImportString type
ImportString = Annotated[str, _ImportStringValidator()]

# Re-export the secret types: __all__ below lists them, and without this
# import `from dhi.special_types import *` would raise AttributeError.
from .secret import SecretStr, SecretBytes  # noqa: E402

__all__ = [
    # UUID
    "UUID1", "UUID3", "UUID4", "UUID5",
    # Path
    "FilePath", "DirectoryPath", "NewPath",
    # Base64
    "Base64Bytes", "Base64Str", "Base64UrlBytes", "Base64UrlStr",
    # Others
    "Json", "ImportString", "ByteSize",
    # Secret (re-exported)
    "SecretStr", "SecretBytes",
]
|
dhi/types.py
ADDED
|
@@ -0,0 +1,345 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Pydantic v2 compatible type aliases for dhi.
|
|
3
|
+
|
|
4
|
+
Provides all the standard Pydantic constrained types as Annotated aliases:
|
|
5
|
+
- Strict types (StrictInt, StrictStr, etc.)
|
|
6
|
+
- Positive/Negative number types
|
|
7
|
+
- FiniteFloat
|
|
8
|
+
- con* factory functions (conint, confloat, constr, etc.)
|
|
9
|
+
|
|
10
|
+
Example:
|
|
11
|
+
from dhi import PositiveInt, StrictStr, conint
|
|
12
|
+
|
|
13
|
+
class User(BaseModel):
|
|
14
|
+
age: PositiveInt
|
|
15
|
+
name: StrictStr
|
|
16
|
+
score: conint(ge=0, le=100)
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
from typing import Annotated, Any, FrozenSet, List, Optional, Set, Type, Union
|
|
20
|
+
import sys
|
|
21
|
+
|
|
22
|
+
from .constraints import (
|
|
23
|
+
Gt, Ge, Lt, Le, MultipleOf,
|
|
24
|
+
MinLength, MaxLength, Pattern,
|
|
25
|
+
Strict, StripWhitespace, ToLower, ToUpper,
|
|
26
|
+
AllowInfNan, MaxDigits, DecimalPlaces, UniqueItems,
|
|
27
|
+
StringConstraints,
|
|
28
|
+
)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
# ============================================================
|
|
32
|
+
# Strict Types - No type coercion allowed
|
|
33
|
+
# ============================================================
|
|
34
|
+
|
|
35
|
+
# Each Strict* alias rejects values whose type does not already match
# (Strict() marker; no coercion on input).
StrictInt = Annotated[int, Strict()]
StrictFloat = Annotated[float, Strict()]
StrictStr = Annotated[str, Strict()]
StrictBool = Annotated[bool, Strict()]
StrictBytes = Annotated[bytes, Strict()]


# ============================================================
# Positive/Negative Integer Types
# ============================================================

PositiveInt = Annotated[int, Gt(gt=0)]      # strictly > 0
NegativeInt = Annotated[int, Lt(lt=0)]      # strictly < 0
NonNegativeInt = Annotated[int, Ge(ge=0)]   # >= 0
NonPositiveInt = Annotated[int, Le(le=0)]   # <= 0


# ============================================================
# Positive/Negative Float Types
# ============================================================

PositiveFloat = Annotated[float, Gt(gt=0)]      # strictly > 0
NegativeFloat = Annotated[float, Lt(lt=0)]      # strictly < 0
NonNegativeFloat = Annotated[float, Ge(ge=0)]   # >= 0
NonPositiveFloat = Annotated[float, Le(le=0)]   # <= 0
# Any finite float; rejects inf, -inf and nan.
FiniteFloat = Annotated[float, AllowInfNan(allow_inf_nan=False)]
|
61
|
+
|
|
62
|
+
|
|
63
|
+
# ============================================================
|
|
64
|
+
# con* Factory Functions - Create constrained Annotated types
|
|
65
|
+
# ============================================================
|
|
66
|
+
|
|
67
|
+
def conint(
    *,
    gt: Optional[int] = None,
    ge: Optional[int] = None,
    lt: Optional[int] = None,
    le: Optional[int] = None,
    multiple_of: Optional[int] = None,
    strict: Optional[bool] = None,
) -> Any:
    """Create a constrained integer type.

    Matches Pydantic's conint() function.

    Example:
        Score = conint(ge=0, le=100)

        class Model(BaseModel):
            score: Score
    """
    # Build metadata in a fixed order; entries for unused options stay None.
    candidates = [
        Strict() if strict else None,
        Gt(gt=gt) if gt is not None else None,
        Ge(ge=ge) if ge is not None else None,
        Lt(lt=lt) if lt is not None else None,
        Le(le=le) if le is not None else None,
        MultipleOf(multiple_of=multiple_of) if multiple_of is not None else None,
    ]
    meta = [c for c in candidates if c is not None]
    # Plain int when nothing was constrained, mirroring Pydantic.
    return Annotated[tuple([int, *meta])] if meta else int
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def confloat(
    *,
    gt: Optional[float] = None,
    ge: Optional[float] = None,
    lt: Optional[float] = None,
    le: Optional[float] = None,
    multiple_of: Optional[float] = None,
    allow_inf_nan: Optional[bool] = None,
    strict: Optional[bool] = None,
) -> Any:
    """Create a constrained float type.

    Matches Pydantic's confloat() function.

    Example:
        Probability = confloat(ge=0.0, le=1.0)
    """
    # Metadata order is fixed; unused options produce None and are dropped.
    candidates = [
        Strict() if strict else None,
        Gt(gt=gt) if gt is not None else None,
        Ge(ge=ge) if ge is not None else None,
        Lt(lt=lt) if lt is not None else None,
        Le(le=le) if le is not None else None,
        MultipleOf(multiple_of=multiple_of) if multiple_of is not None else None,
        AllowInfNan(allow_inf_nan=allow_inf_nan) if allow_inf_nan is not None else None,
    ]
    meta = [c for c in candidates if c is not None]
    return Annotated[tuple([float, *meta])] if meta else float
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def constr(
    *,
    min_length: Optional[int] = None,
    max_length: Optional[int] = None,
    pattern: Optional[str] = None,
    strip_whitespace: bool = False,
    to_lower: bool = False,
    to_upper: bool = False,
    strict: Optional[bool] = None,
) -> Any:
    """Create a constrained string type.

    Matches Pydantic's constr() function.

    Example:
        Username = constr(min_length=3, max_length=20, to_lower=True)
    """
    # Fixed metadata order; flags that are off yield None and get filtered.
    candidates = [
        Strict() if strict else None,
        MinLength(min_length=min_length) if min_length is not None else None,
        MaxLength(max_length=max_length) if max_length is not None else None,
        Pattern(pattern=pattern) if pattern is not None else None,
        StripWhitespace() if strip_whitespace else None,
        ToLower() if to_lower else None,
        ToUpper() if to_upper else None,
    ]
    meta = [c for c in candidates if c is not None]
    return Annotated[tuple([str, *meta])] if meta else str
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def conbytes(
    *,
    min_length: Optional[int] = None,
    max_length: Optional[int] = None,
    strict: Optional[bool] = None,
) -> Any:
    """Create a constrained bytes type.

    Matches Pydantic's conbytes() function.
    """
    candidates = [
        Strict() if strict else None,
        MinLength(min_length=min_length) if min_length is not None else None,
        MaxLength(max_length=max_length) if max_length is not None else None,
    ]
    meta = [c for c in candidates if c is not None]
    return Annotated[tuple([bytes, *meta])] if meta else bytes
|
198
|
+
|
|
199
|
+
|
|
200
|
+
def conlist(
    item_type: Type = Any,
    *,
    min_length: Optional[int] = None,
    max_length: Optional[int] = None,
    unique_items: Optional[bool] = None,
) -> Any:
    """Create a constrained list type.

    Matches Pydantic's conlist() function.

    Example:
        Tags = conlist(str, min_length=1, max_length=10)
    """
    candidates = [
        MinLength(min_length=min_length) if min_length is not None else None,
        MaxLength(max_length=max_length) if max_length is not None else None,
        UniqueItems() if unique_items else None,
    ]
    meta = [c for c in candidates if c is not None]
    element_list = List[item_type]
    return Annotated[tuple([element_list, *meta])] if meta else element_list
|
225
|
+
|
|
226
|
+
|
|
227
|
+
def conset(
    item_type: Type = Any,
    *,
    min_length: Optional[int] = None,
    max_length: Optional[int] = None,
) -> Any:
    """Create a constrained set type.

    Matches Pydantic's conset() function.
    """
    candidates = [
        MinLength(min_length=min_length) if min_length is not None else None,
        MaxLength(max_length=max_length) if max_length is not None else None,
    ]
    meta = [c for c in candidates if c is not None]
    element_set = Set[item_type]
    return Annotated[tuple([element_set, *meta])] if meta else element_set
|
246
|
+
|
|
247
|
+
|
|
248
|
+
def confrozenset(
    item_type: Type = Any,
    *,
    min_length: Optional[int] = None,
    max_length: Optional[int] = None,
) -> Any:
    """Create a constrained frozenset type.

    Matches Pydantic's confrozenset() function.
    """
    candidates = [
        MinLength(min_length=min_length) if min_length is not None else None,
        MaxLength(max_length=max_length) if max_length is not None else None,
    ]
    meta = [c for c in candidates if c is not None]
    element_fset = FrozenSet[item_type]
    return Annotated[tuple([element_fset, *meta])] if meta else element_fset
|
267
|
+
|
|
268
|
+
|
|
269
|
+
def condecimal(
    *,
    gt: Optional[Any] = None,
    ge: Optional[Any] = None,
    lt: Optional[Any] = None,
    le: Optional[Any] = None,
    multiple_of: Optional[Any] = None,
    max_digits: Optional[int] = None,
    decimal_places: Optional[int] = None,
    allow_inf_nan: Optional[bool] = None,
) -> Any:
    """Create a constrained Decimal type.

    Matches Pydantic's condecimal() function.
    """
    from decimal import Decimal
    candidates = [
        Gt(gt=gt) if gt is not None else None,
        Ge(ge=ge) if ge is not None else None,
        Lt(lt=lt) if lt is not None else None,
        Le(le=le) if le is not None else None,
        MultipleOf(multiple_of=multiple_of) if multiple_of is not None else None,
        MaxDigits(max_digits=max_digits) if max_digits is not None else None,
        DecimalPlaces(decimal_places=decimal_places) if decimal_places is not None else None,
        AllowInfNan(allow_inf_nan=allow_inf_nan) if allow_inf_nan is not None else None,
    ]
    meta = [c for c in candidates if c is not None]
    return Annotated[tuple([Decimal, *meta])] if meta else Decimal
|
305
|
+
|
|
306
|
+
|
|
307
|
+
def condate(
    *,
    gt: Optional[Any] = None,
    ge: Optional[Any] = None,
    lt: Optional[Any] = None,
    le: Optional[Any] = None,
) -> Any:
    """Create a constrained date type.

    Matches Pydantic's condate() function.
    """
    from datetime import date
    candidates = [
        Gt(gt=gt) if gt is not None else None,
        Ge(ge=ge) if ge is not None else None,
        Lt(lt=lt) if lt is not None else None,
        Le(le=le) if le is not None else None,
    ]
    meta = [c for c in candidates if c is not None]
    return Annotated[tuple([date, *meta])] if meta else date
|
331
|
+
|
|
332
|
+
|
|
333
|
+
# Public API of this module; governs `from dhi.types import *`.
__all__ = [
    # Strict types
    "StrictInt", "StrictFloat", "StrictStr", "StrictBool", "StrictBytes",
    # Positive/Negative integers
    "PositiveInt", "NegativeInt", "NonNegativeInt", "NonPositiveInt",
    # Positive/Negative floats
    "PositiveFloat", "NegativeFloat", "NonNegativeFloat", "NonPositiveFloat",
    "FiniteFloat",
    # con* functions
    "conint", "confloat", "constr", "conbytes",
    "conlist", "conset", "confrozenset",
    "condecimal", "condate",
]