hypern-0.3.5-cp310-cp310-win32.whl → hypern-0.3.7-cp310-cp310-win32.whl

This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
hypern/database/sql/__init__.py CHANGED
@@ -1,11 +1,34 @@
1
1
  # from .context import SqlConfig, DatabaseType
2
- from .field import CharField, IntegerField
2
+ from .field import (
3
+ CharField,
4
+ IntegerField,
5
+ TextField,
6
+ FloatField,
7
+ BooleanField,
8
+ ForeignKeyField,
9
+ DateTimeField,
10
+ Field,
11
+ JSONField,
12
+ ArrayField,
13
+ DecimalField,
14
+ DateField,
15
+ )
3
16
  from .model import Model
4
17
  from .query import F, Q, QuerySet
5
18
 
6
19
  __all__ = [
7
20
  "CharField",
8
21
  "IntegerField",
22
+ "TextField",
23
+ "FloatField",
24
+ "BooleanField",
25
+ "ForeignKeyField",
26
+ "DateTimeField",
27
+ "Field",
28
+ "JSONField",
29
+ "ArrayField",
30
+ "DecimalField",
31
+ "DateField",
9
32
  "Model",
10
33
  "Q",
11
34
  "F",
hypern/database/sql/field.py CHANGED
@@ -1,8 +1,7 @@
1
1
  import json
2
- import re
3
- from datetime import date, datetime, timezone
2
+ from datetime import date, datetime
4
3
  from decimal import Decimal, InvalidOperation
5
- from typing import Any, Callable, List, Optional, Union
4
+ from typing import Any, Optional, Union
6
5
 
7
6
  from hypern.exceptions import DBFieldValidationError
8
7
 
@@ -18,20 +17,9 @@ class Field:
18
17
  default: Any = None,
19
18
  unique: bool = False,
20
19
  index: bool = False,
21
- validators: Optional[list[Callable]] = None,
20
+ validators: Optional[list] = None,
22
21
  auto_increment: bool = False,
23
22
  ):
24
- """
25
- Initialize a field with various constraints and validation options.
26
-
27
- :param field_type: Type of the field
28
- :param primary_key: Whether the field is a primary key
29
- :param null: Whether the field can be null
30
- :param default: Default value for the field
31
- :param unique: Whether the field value must be unique
32
- :param index: Whether to create an index for this field
33
- :param validators: List of custom validator functions
34
- """
35
23
  self.field_type = field_type
36
24
  self.primary_key = primary_key
37
25
  self.null = null
@@ -43,23 +31,20 @@ class Field:
43
31
  self.model = None
44
32
  self.auto_increment = auto_increment
45
33
 
46
- def to_py_type(self, value: Any) -> Any:
47
- """
48
- Convert input value to the field's Python type.
49
-
50
- :param value: Input value to convert
51
- :return: Converted value
52
- """
34
+ def validate(self, value: Any) -> None:
53
35
  if value is None:
54
- return None
55
- return value
36
+ if not self.null:
37
+ raise DBFieldValidationError(f"Field {self.name} cannot be null")
38
+ return
56
39
 
57
- def to_sql_type(self) -> str:
58
- """
59
- Get the SQL type representation of the field.
40
+ for validator in self.validators:
41
+ try:
42
+ validator(value)
43
+ except Exception as e:
44
+ raise DBFieldValidationError(f"Validation failed for {self.name}: {str(e)}")
60
45
 
61
- :return: SQL type string
62
- """
46
+ def sql_type(self) -> str:
47
+ """Return SQL type definition for the field."""
63
48
  type_mapping = {
64
49
  "int": "INTEGER",
65
50
  "str": "VARCHAR(255)",
@@ -74,534 +59,188 @@ class Field:
74
59
  }
75
60
  return type_mapping.get(self.field_type, "VARCHAR(255)")
76
61
 
77
- def validate(self, value: Any) -> None:
78
- """
79
- Validate the input value against field constraints.
80
-
81
- :param value: Value to validate
82
- :raises DBFieldValidationError: If validation fails
83
- """
84
- # Null check
85
- if value is None:
86
- if not self.null:
87
- raise DBFieldValidationError(f"Field {self.name} cannot be null")
88
- return
89
-
90
- # Run custom validators
91
- for validator in self.validators:
92
- try:
93
- validator(value)
94
- except Exception as e:
95
- raise DBFieldValidationError(f"Validation failed for {self.name}: {str(e)}")
96
-
97
62
 
98
63
  class CharField(Field):
99
- """Character field with max length constraint."""
100
-
101
- def __init__(self, max_length: int = 255, min_length: int = 0, regex: Optional[str] = None, **kwargs):
102
- """
103
- Initialize a character field.
104
-
105
- :param max_length: Maximum allowed length
106
- :param min_length: Minimum allowed length
107
- :param regex: Optional regex pattern for validation
108
- """
109
- super().__init__("str", **kwargs)
64
+ def __init__(self, max_length: int = 255, **kwargs):
65
+ super().__init__(field_type="str", **kwargs)
110
66
  self.max_length = max_length
111
- self.min_length = min_length
112
- self.regex = regex
113
-
114
- def to_py_type(self, value: Any) -> Optional[str]:
115
- """Convert input to string."""
116
- if value is None:
117
- return None
118
- return str(value)
119
-
120
- def to_sql_type(self) -> str:
121
- """Get SQL type with defined max length."""
122
- return f"VARCHAR({self.max_length})"
123
67
 
124
68
  def validate(self, value: Any) -> None:
125
- """
126
- Validate character field constraints.
127
-
128
- :param value: Value to validate
129
- """
130
69
  super().validate(value)
70
+ if value is not None:
71
+ if not isinstance(value, str):
72
+ raise DBFieldValidationError(f"Field {self.name} must be a string")
73
+ if len(value) > self.max_length:
74
+ raise DBFieldValidationError(f"Field {self.name} cannot exceed {self.max_length} characters")
131
75
 
132
- if value is None:
133
- return
134
-
135
- # Convert to string for validation
136
- str_value = str(value)
76
+ def sql_type(self) -> str:
77
+ return f"VARCHAR({self.max_length})"
137
78
 
138
- # Length validation
139
- if len(str_value) > self.max_length:
140
- raise DBFieldValidationError(f"Value exceeds max length of {self.max_length}")
141
79
 
142
- if len(str_value) < self.min_length:
143
- raise DBFieldValidationError(f"Value is shorter than min length of {self.min_length}")
80
+ class TextField(Field):
81
+ def __init__(self, **kwargs):
82
+ super().__init__(field_type="text", **kwargs)
144
83
 
145
- # Regex validation
146
- if self.regex and not re.match(self.regex, str_value):
147
- raise DBFieldValidationError(f"Value does not match required pattern: {self.regex}")
84
+ def validate(self, value: Any) -> None:
85
+ super().validate(value)
86
+ if value is not None and not isinstance(value, str):
87
+ raise DBFieldValidationError(f"Field {self.name} must be a string")
148
88
 
149
89
 
150
90
  class IntegerField(Field):
151
- """Integer field with range constraints."""
152
-
153
- def __init__(self, min_value: Optional[int] = None, max_value: Optional[int] = None, **kwargs):
154
- """
155
- Initialize an integer field.
156
-
157
- :param min_value: Minimum allowed value
158
- :param max_value: Maximum allowed value
159
- """
160
- super().__init__("int", **kwargs)
161
- self.min_value = min_value
162
- self.max_value = max_value
163
-
164
- def to_py_type(self, value: Any) -> Optional[int]:
165
- """Convert input to integer."""
166
- if value is None:
167
- return None
168
- try:
169
- return int(value)
170
- except (TypeError, ValueError):
171
- raise DBFieldValidationError(f"Cannot convert {value} to integer")
91
+ def __init__(self, **kwargs):
92
+ super().__init__(field_type="int", **kwargs)
172
93
 
173
94
  def validate(self, value: Any) -> None:
174
- """
175
- Validate integer field constraints.
176
-
177
- :param value: Value to validate
178
- """
179
95
  super().validate(value)
96
+ if value is not None:
97
+ try:
98
+ int(value)
99
+ except (TypeError, ValueError):
100
+ raise DBFieldValidationError(f"Field {self.name} must be an integer")
180
101
 
181
- if value is None:
182
- return
183
-
184
- int_value = self.to_py_type(value)
185
-
186
- # Range validation
187
- if self.min_value is not None and int_value < self.min_value:
188
- raise DBFieldValidationError(f"Value must be >= {self.min_value}")
189
-
190
- if self.max_value is not None and int_value > self.max_value:
191
- raise DBFieldValidationError(f"Value must be <= {self.max_value}")
192
-
193
-
194
- class DecimalField(Field):
195
- """Decimal field with precision and scale constraints."""
196
-
197
- def __init__(
198
- self,
199
- max_digits: int = 10,
200
- decimal_places: int = 2,
201
- min_value: Optional[Union[int, float, Decimal]] = None,
202
- max_value: Optional[Union[int, float, Decimal]] = None,
203
- **kwargs,
204
- ):
205
- """
206
- Initialize a decimal field.
207
-
208
- :param max_digits: Total number of digits
209
- :param decimal_places: Number of decimal places
210
- :param min_value: Minimum allowed value
211
- :param max_value: Maximum allowed value
212
- """
213
- super().__init__("decimal", **kwargs)
214
- self.max_digits = max_digits
215
- self.decimal_places = decimal_places
216
- self.min_value = min_value
217
- self.max_value = max_value
218
-
219
- def to_py_type(self, value: Any) -> Optional[Decimal]:
220
- """Convert input to Decimal."""
221
- if value is None:
222
- return None
223
- try:
224
- decimal_value = Decimal(str(value))
225
102
 
226
- # Check precision
227
- parts = str(decimal_value).split(".")
228
- total_digits = len(parts[0].lstrip("-")) + (len(parts[1]) if len(parts) > 1 else 0)
229
- decimal_digits = len(parts[1]) if len(parts) > 1 else 0
103
+ class FloatField(Field):
104
+ def __init__(self, **kwargs):
105
+ super().__init__(field_type="float", **kwargs)
230
106
 
231
- if total_digits > self.max_digits or decimal_digits > self.decimal_places:
232
- raise DBFieldValidationError(f"Decimal exceeds precision: {self.max_digits} digits, {self.decimal_places} decimal places")
107
+ def validate(self, value: Any) -> None:
108
+ super().validate(value)
109
+ if value is not None:
110
+ try:
111
+ float(value)
112
+ except (TypeError, ValueError):
113
+ raise DBFieldValidationError(f"Field {self.name} must be a float")
233
114
 
234
- return decimal_value
235
- except (TypeError, ValueError, InvalidOperation):
236
- raise DBFieldValidationError(f"Cannot convert {value} to Decimal")
237
115
 
238
- def to_sql_type(self) -> str:
239
- """Get SQL type with defined precision."""
240
- return f"DECIMAL({self.max_digits},{self.decimal_places})"
116
+ class BooleanField(Field):
117
+ def __init__(self, **kwargs):
118
+ super().__init__(field_type="bool", **kwargs)
241
119
 
242
120
  def validate(self, value: Any) -> None:
243
- """
244
- Validate decimal field constraints.
245
-
246
- :param value: Value to validate
247
- """
248
121
  super().validate(value)
122
+ if value is not None and not isinstance(value, bool):
123
+ raise DBFieldValidationError(f"Field {self.name} must be a boolean")
249
124
 
250
- if value is None:
251
- return
252
125
 
253
- decimal_value = self.to_py_type(value)
254
-
255
- # Range validation
256
- if self.min_value is not None and decimal_value < Decimal(str(self.min_value)):
257
- raise DBFieldValidationError(f"Value must be >= {self.min_value}")
126
+ class DateTimeField(Field):
127
+ def __init__(self, auto_now: bool = False, auto_now_add: bool = False, **kwargs):
128
+ super().__init__(field_type="datetime", **kwargs)
129
+ self.auto_now = auto_now
130
+ self.auto_now_add = auto_now_add
258
131
 
259
- if self.max_value is not None and decimal_value > Decimal(str(self.max_value)):
260
- raise DBFieldValidationError(f"Value must be <= {self.max_value}")
132
+ def validate(self, value: Any) -> None:
133
+ super().validate(value)
134
+ if value is not None and not isinstance(value, datetime):
135
+ raise DBFieldValidationError(f"Field {self.name} must be a datetime object")
261
136
 
262
137
 
263
138
  class DateField(Field):
264
- """Date field with range constraints."""
265
-
266
- def __init__(self, auto_now: bool = False, auto_now_add: bool = False, min_date: Optional[date] = None, max_date: Optional[date] = None, **kwargs):
267
- """
268
- Initialize a date field.
269
-
270
- :param auto_now: Update to current date on every save
271
- :param auto_now_add: Set to current date when first created
272
- :param min_date: Minimum allowed date
273
- :param max_date: Maximum allowed date
274
- """
275
- super().__init__("date", **kwargs)
139
+ def __init__(self, auto_now: bool = False, auto_now_add: bool = False, **kwargs):
140
+ super().__init__(field_type="date", **kwargs)
276
141
  self.auto_now = auto_now
277
142
  self.auto_now_add = auto_now_add
278
- self.min_date = min_date
279
- self.max_date = max_date
280
-
281
- def to_py_type(self, value: Any) -> Optional[date]:
282
- """Convert input to date."""
283
- if value is None:
284
- return None
285
-
286
- if isinstance(value, date):
287
- return value
288
-
289
- try:
290
- return date.fromisoformat(str(value))
291
- except ValueError:
292
- raise DBFieldValidationError(f"Cannot convert {value} to date")
293
143
 
294
144
  def validate(self, value: Any) -> None:
295
- """
296
- Validate date field constraints.
297
-
298
- :param value: Value to validate
299
- """
300
145
  super().validate(value)
301
-
302
- if value is None:
303
- return
304
-
305
- date_value = self.to_py_type(value)
306
-
307
- # Range validation
308
- if self.min_date is not None and date_value < self.min_date:
309
- raise DBFieldValidationError(f"Date must be >= {self.min_date}")
310
-
311
- if self.max_date is not None and date_value > self.max_date:
312
- raise DBFieldValidationError(f"Date must be <= {self.max_date}")
146
+ if value is not None and not isinstance(value, date):
147
+ raise DBFieldValidationError(f"Field {self.name} must be a date object")
313
148
 
314
149
 
315
150
  class JSONField(Field):
316
- """JSON field with optional schema validation."""
317
-
318
- def __init__(self, schema: Optional[dict] = None, **kwargs):
319
- """
320
- Initialize a JSON field.
321
-
322
- :param schema: Optional JSON schema for validation
323
- """
324
- super().__init__("json", **kwargs)
325
- self.schema = schema
326
-
327
- def to_py_type(self, value: Any) -> Optional[dict]:
328
- """Convert input to JSON."""
329
- if value is None:
330
- return None
331
-
332
- if isinstance(value, str):
333
- try:
334
- return json.loads(value)
335
- except json.JSONDecodeError:
336
- raise DBFieldValidationError(f"Invalid JSON string: {value}")
337
-
338
- if isinstance(value, dict):
339
- return value
340
-
341
- raise DBFieldValidationError(f"Cannot convert {value} to JSON")
151
+ def __init__(self, **kwargs):
152
+ super().__init__(field_type="json", **kwargs)
342
153
 
343
154
  def validate(self, value: Any) -> None:
344
- """
345
- Validate JSON field constraints.
346
-
347
- :param value: Value to validate
348
- """
349
155
  super().validate(value)
350
-
351
- if value is None:
352
- return
353
-
354
- json_value = self.to_py_type(value)
355
-
356
- # Schema validation
357
- if self.schema:
358
- from jsonschema import DBFieldValidationError as JsonSchemaError
359
- from jsonschema import validate
360
-
156
+ if value is not None:
361
157
  try:
362
- validate(instance=json_value, schema=self.schema)
363
- except JsonSchemaError as e:
364
- raise DBFieldValidationError(f"JSON schema validation failed: {str(e)}")
158
+ json.dumps(value)
159
+ except (TypeError, ValueError):
160
+ raise DBFieldValidationError(f"Field {self.name} must be JSON serializable")
365
161
 
366
162
 
367
163
  class ArrayField(Field):
368
- """Array field with base field type validation."""
369
-
370
- def __init__(self, base_field: Field, min_length: Optional[int] = None, max_length: Optional[int] = None, **kwargs):
371
- """
372
- Initialize an array field.
373
-
374
- :param base_field: Field type for array elements
375
- :param min_length: Minimum number of elements
376
- :param max_length: Maximum number of elements
377
- """
378
- super().__init__("array", **kwargs)
164
+ def __init__(self, base_field: Field, **kwargs):
165
+ super().__init__(field_type="array", **kwargs)
379
166
  self.base_field = base_field
380
- self.min_length = min_length
381
- self.max_length = max_length
382
-
383
- def to_py_type(self, value: Any) -> Optional[List[Any]]:
384
- """
385
- Convert input to a list with base field type conversion.
386
-
387
- :param value: Input value to convert
388
- :return: Converted list
389
- """
390
- if value is None:
391
- return None
392
-
393
- # Ensure input is a list
394
- if not isinstance(value, list):
395
- try:
396
- value = list(value)
397
- except TypeError:
398
- raise DBFieldValidationError(f"Cannot convert {value} to list")
399
-
400
- # Convert each element using base field's to_py_type
401
- return [self.base_field.to_py_type(item) for item in value]
402
-
403
- def to_sql_type(self) -> str:
404
- """
405
- Get SQL type representation of the array.
406
-
407
- :return: SQL array type string
408
- """
409
- return f"{self.base_field.to_sql_type()}[]"
410
167
 
411
168
  def validate(self, value: Any) -> None:
412
- """
413
- Validate array field constraints.
414
-
415
- :param value: Value to validate
416
- """
417
169
  super().validate(value)
170
+ if value is not None:
171
+ if not isinstance(value, (list, tuple)):
172
+ raise DBFieldValidationError(f"Field {self.name} must be a list or tuple")
173
+ for item in value:
174
+ self.base_field.validate(item)
418
175
 
419
- if value is None:
420
- return
421
-
422
- # Ensure we have a list
423
- list_value = self.to_py_type(value)
424
-
425
- # Length validation
426
- if self.min_length is not None and len(list_value) < self.min_length:
427
- raise DBFieldValidationError(f"Array must have at least {self.min_length} elements")
428
-
429
- if self.max_length is not None and len(list_value) > self.max_length:
430
- raise DBFieldValidationError(f"Array must have no more than {self.max_length} elements")
176
+ def sql_type(self) -> str:
177
+ return f"{self.base_field.sql_type()}[]"
431
178
 
432
- # Validate each element using base field's validate method
433
- for item in list_value:
434
- self.base_field.validate(item)
435
179
 
436
-
437
- class ForeignKey(Field):
438
- """Foreign key field representing a relationship to another model."""
439
-
440
- def __init__(self, to_model: str, related_field: str, on_delete: str = "CASCADE", on_update: str = "CASCADE", **kwargs):
441
- """
442
- Initialize a foreign key field.
443
-
444
- :param to_model: Name of the related model
445
- :param on_delete: Action to take on related record deletion
446
- :param on_update: Action to take on related record update
447
- """
448
- # Allow overriding primary key and null status if not specified
449
- if "primary_key" not in kwargs:
450
- kwargs["primary_key"] = False
451
- if "null" not in kwargs:
452
- kwargs["null"] = False
453
-
454
- super().__init__("int", **kwargs)
455
- self.to_model = to_model
456
- self.on_delete = on_delete
457
- self.on_update = on_update
458
- self.related_field = related_field
459
-
460
- def to_py_type(self, value: Any) -> Optional[int]:
461
- """
462
- Convert input to integer representing foreign key.
463
-
464
- :param value: Value to convert
465
- :return: Converted integer
466
- """
467
- if value is None:
468
- return None
469
-
470
- try:
471
- return int(value)
472
- except (TypeError, ValueError):
473
- raise DBFieldValidationError(f"Cannot convert {value} to integer foreign key")
474
-
475
- def to_sql_type(self) -> str:
476
- """
477
- Get SQL type for foreign key.
478
-
479
- :return: SQL integer type string
480
- """
481
- return "INTEGER"
180
+ class DecimalField(Field):
181
+ def __init__(self, max_digits: int = 10, decimal_places: int = 2, **kwargs):
182
+ super().__init__(field_type="decimal", **kwargs)
183
+ self.max_digits = max_digits
184
+ self.decimal_places = decimal_places
482
185
 
483
186
  def validate(self, value: Any) -> None:
484
- """
485
- Validate foreign key constraints.
486
-
487
- :param value: Value to validate
488
- """
489
187
  super().validate(value)
188
+ if value is not None:
189
+ try:
190
+ decimal_value = Decimal(str(value))
191
+ decimal_tuple = decimal_value.as_tuple()
192
+ if len(decimal_tuple.digits) - (-decimal_tuple.exponent) > self.max_digits:
193
+ raise DBFieldValidationError(f"Field {self.name} exceeds maximum digits {self.max_digits}")
194
+ if -decimal_tuple.exponent > self.decimal_places:
195
+ raise DBFieldValidationError(f"Field {self.name} exceeds maximum decimal places {self.decimal_places}")
196
+ except InvalidOperation:
197
+ raise DBFieldValidationError(f"Field {self.name} must be a valid decimal number")
198
+
199
+ def sql_type(self) -> str:
200
+ return f"DECIMAL({self.max_digits},{self.decimal_places})"
490
201
 
491
202
 
492
- class DateTimeField(Field):
493
- """DateTime field with advanced validation and auto-update capabilities."""
203
+ class ForeignKeyField(Field):
204
+ """Field for foreign key relationships."""
494
205
 
495
206
  def __init__(
496
207
  self,
497
- auto_now: bool = False,
498
- auto_now_add: bool = False,
499
- min_datetime: Optional[datetime] = None,
500
- max_datetime: Optional[datetime] = None,
501
- timezone_aware: bool = True,
208
+ to_model: Union[str, Any],
209
+ related_field: str = "id",
210
+ on_delete: str = "CASCADE",
211
+ on_update: str = "CASCADE",
212
+ related_name: Optional[str] = None,
502
213
  **kwargs,
503
214
  ):
504
- """
505
- Initialize a datetime field.
506
-
507
- :param auto_now: Update to current datetime on every save
508
- :param auto_now_add: Set to current datetime when first created
509
- :param min_datetime: Minimum allowed datetime
510
- :param max_datetime: Maximum allowed datetime
511
- :param timezone_aware: Enforce timezone awareness
512
- """
513
- super().__init__("datetime", **kwargs)
514
- self.auto_now = auto_now
515
- self.auto_now_add = auto_now_add
516
- self.min_datetime = min_datetime
517
- self.max_datetime = max_datetime
518
- self.timezone_aware = timezone_aware
519
-
520
- def to_py_type(self, value: Any) -> Optional[datetime]:
521
- """
522
- Convert input to datetime with robust parsing.
523
-
524
- :param value: Value to convert
525
- :return: Converted datetime
526
- """
527
- if value is None:
528
- return None
529
-
530
- # If already a datetime, handle timezone
531
- if isinstance(value, datetime):
532
- return self._handle_timezone(value)
533
-
534
- # String parsing with multiple formats
535
- if isinstance(value, str):
536
- try:
537
- # ISO format parsing
538
- parsed_datetime = datetime.fromisoformat(value)
539
- return self._handle_timezone(parsed_datetime)
540
- except ValueError:
541
- # Additional parsing formats can be added
542
- try:
543
- # Alternative parsing (e.g., common formats)
544
- parsed_datetime = datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
545
- return self._handle_timezone(parsed_datetime)
546
- except ValueError:
547
- raise DBFieldValidationError(f"Cannot parse datetime from: {value}")
548
-
549
- # Attempt generic conversion
550
- try:
551
- converted_datetime = datetime.fromtimestamp(float(value))
552
- return self._handle_timezone(converted_datetime)
553
- except (TypeError, ValueError):
554
- raise DBFieldValidationError(f"Cannot convert {value} to datetime")
555
-
556
- def _handle_timezone(self, dt: datetime) -> datetime:
557
- """
558
- Handle timezone requirements.
559
-
560
- :param dt: Input datetime
561
- :return: Timezone-adjusted datetime
562
- """
563
- if self.timezone_aware:
564
- # If no timezone, assume UTC
565
- if dt.tzinfo is None:
566
- dt = dt.replace(tzinfo=timezone.utc)
215
+ if isinstance(to_model, str):
216
+ field_type = "int"
567
217
  else:
568
- # Remove timezone if not required
569
- dt = dt.replace(tzinfo=None)
218
+ related_field_obj = getattr(to_model, related_field, None)
219
+ if related_field_obj is None:
220
+ raise ValueError(f"Field {related_field} not found in model {to_model.__name__}")
221
+ field_type = related_field_obj.field_type
570
222
 
571
- return dt
223
+ super().__init__(field_type=field_type, **kwargs)
224
+ self.to_model = to_model
225
+ self.related_field = related_field
226
+ self.on_delete = on_delete.upper()
227
+ self.on_update = on_update.upper()
228
+ self.related_name = related_name
572
229
 
573
- def to_sql_type(self) -> str:
574
- """
575
- Get SQL type for datetime.
230
+ valid_actions = {"CASCADE", "SET NULL", "RESTRICT", "NO ACTION"}
231
+ if self.on_delete not in valid_actions:
232
+ raise ValueError(f"Invalid on_delete action. Must be one of: {valid_actions}")
233
+ if self.on_update not in valid_actions:
234
+ raise ValueError(f"Invalid on_update action. Must be one of: {valid_actions}")
576
235
 
577
- :return: SQL timestamp type string
578
- """
579
- return "TIMESTAMP"
236
+ if (self.on_delete == "SET NULL" or self.on_update == "SET NULL") and not kwargs.get("null", True):
237
+ raise ValueError("Field must be nullable to use SET NULL referential action")
580
238
 
581
239
  def validate(self, value: Any) -> None:
582
- """
583
- Validate datetime field constraints.
584
-
585
- :param value: Value to validate
586
- """
587
240
  super().validate(value)
588
-
589
- if value is None:
590
- return
591
-
592
- datetime_value = self.to_py_type(value)
593
-
594
- # Range validation
595
- if self.min_datetime is not None:
596
- min_dt = self._handle_timezone(self.min_datetime)
597
- if datetime_value < min_dt:
598
- raise DBFieldValidationError(f"Datetime must be >= {min_dt}")
599
-
600
- if self.max_datetime is not None:
601
- max_dt = self._handle_timezone(self.max_datetime)
602
- if datetime_value > max_dt:
603
- raise DBFieldValidationError(f"Datetime must be <= {max_dt}")
604
-
605
- # Timezone awareness check
606
- if self.timezone_aware and datetime_value.tzinfo is None:
607
- raise DBFieldValidationError("Datetime must be timezone-aware")
241
+ if value is not None and not isinstance(self.to_model, str):
242
+ related_field_obj = getattr(self.to_model, self.related_field)
243
+ try:
244
+ related_field_obj.validate(value)
245
+ except DBFieldValidationError as e:
246
+ raise DBFieldValidationError(f"Foreign key {self.name} validation failed: {str(e)}")
hypern/database/sql/migrate.py ADDED
@@ -0,0 +1,263 @@
1
+ # import os
2
+ # import sys
3
+ # import inspect
4
+ # import importlib
5
+ # import hashlib
6
+ # import argparse
7
+ # from datetime import datetime
8
+ # from typing import List, Type, Dict
9
+
10
+ # from hypern.config import get_config
11
+ # from .model import Model
12
+
13
+
14
+ # class MigrationManager:
15
+ # """Manages database migrations and schema changes."""
16
+
17
+ # def __init__(self, migrations_dir: str = "migrations"):
18
+ # self.migrations_dir = migrations_dir
19
+ # self.config = get_config()
20
+ # self.ensure_migrations_dir()
21
+
22
+ # def ensure_migrations_dir(self):
23
+ # """Ensure migrations directory exists."""
24
+ # if not os.path.exists(self.migrations_dir):
25
+ # os.makedirs(self.migrations_dir)
26
+ # # Create __init__.py to make it a package
27
+ # with open(os.path.join(self.migrations_dir, "__init__.py"), "w") as f:
28
+ # pass
29
+
30
+ # def collect_models(self) -> Dict[str, Type[Model]]:
31
+ # """Collect all model classes from the project."""
32
+ # models = {}
33
+ # # Scan all Python files in the project directory
34
+ # for root, _, files in os.walk("."):
35
+ # if "venv" in root or "migrations" in root:
36
+ # continue
37
+ # for file in files:
38
+ # if file.endswith(".py"):
39
+ # module_path = os.path.join(root, file)
40
+ # module_name = module_path.replace("/", ".").replace("\\", ".")[2:-3]
41
+ # try:
42
+ # module = importlib.import_module(module_name)
43
+ # for name, obj in inspect.getmembers(module):
44
+ # if inspect.isclass(obj) and issubclass(obj, Model) and obj != Model:
45
+ # models[obj.__name__] = obj
46
+ # except (ImportError, AttributeError):
47
+ # continue
48
+ # return models
49
+
50
+ # def generate_migration(self, name: str):
51
+ # """Generate a new migration file."""
52
+ # timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
53
+ # migration_id = f"{timestamp}_{name}"
54
+ # filename = f"{migration_id}.py"
55
+ # filepath = os.path.join(self.migrations_dir, filename)
56
+
57
+ # models = self.collect_models()
58
+
59
+ # # Generate migration content
60
+ # content = self._generate_migration_content(migration_id, models)
61
+
62
+ # with open(filepath, "w") as f:
63
+ # f.write(content)
64
+
65
+ # print(f"Created migration: {filename}")
66
+
67
+ # def _generate_migration_content(self, migration_id: str, models: Dict[str, Type[Model]]) -> str:
68
+ # """Generate the content for a migration file."""
69
+ # content = [
70
+ # "from typing import List",
71
+ # "from hypern.migrations import Migration\n",
72
+ # ]
73
+
74
+ # # Import all models
75
+ # for model_name in models.keys():
76
+ # content.append(f"from app.models import {model_name}")
77
+
78
+ # content.extend([
79
+ # "\n\nclass " + migration_id + "(Migration):",
80
+ # " \"\"\"",
81
+ # " Auto-generated migration.",
82
+ # " \"\"\"",
83
+ # "",
84
+ # " def up(self) -> List[str]:",
85
+ # " return [",
86
+ # ])
87
+
88
+ # # Add CREATE TABLE statements
89
+ # for model in models.values():
90
+ # content.append(f" '''{model.create_table_sql()}''',")
91
+
92
+ # content.extend([
93
+ # " ]",
94
+ # "",
95
+ # " def down(self) -> List[str]:",
96
+ # " return [",
97
+ # ])
98
+
99
+ # # Add DROP TABLE statements in reverse order
100
+ # for model_name in reversed(list(models.keys())):
101
+ # content.append(f" '''DROP TABLE IF EXISTS {model_name.lower()} CASCADE;''',")
102
+
103
+ # content.extend([
104
+ # " ]",
105
+ # ""
106
+ # ])
107
+
108
+ # return "\n".join(content)
109
+
110
+ # def get_applied_migrations(self) -> List[str]:
111
+ # """Get list of applied migrations from database."""
112
+ # session = get_session_database()
113
+ # try:
114
+ # result = session.execute("""
115
+ # SELECT migration_id FROM migrations
116
+ # ORDER BY applied_at;
117
+ # """)
118
+ # return [row[0] for row in result]
119
+ # except Exception:
120
+ # # Migrations table doesn't exist yet
121
+ # return []
122
+
123
+ # def apply_migrations(self, target: str = None):
124
+ # """Apply pending migrations up to target (or all if target is None)."""
125
+ # # Create migrations table if it doesn't exist
126
+ # self._ensure_migrations_table()
127
+
128
+ # # Get applied and available migrations
129
+ # applied = set(self.get_applied_migrations())
130
+ # available = self._get_available_migrations()
131
+
132
+ # # Determine which migrations to apply
133
+ # to_apply = []
134
+ # for migration_id, module in available.items():
135
+ # if migration_id not in applied:
136
+ # to_apply.append((migration_id, module))
137
+
138
+ # if target and migration_id == target:
139
+ # break
140
+
141
+ # # Apply migrations
142
+ # session = get_session_database()
143
+ # for migration_id, module in to_apply:
144
+ # print(f"Applying migration: {migration_id}")
145
+
146
+ # migration = module()
147
+ # for sql in migration.up():
148
+ # session.execute(sql)
149
+
150
+ # # Record migration
151
+ # session.execute(
152
+ # "INSERT INTO migrations (migration_id, applied_at) VALUES (%s, NOW())",
153
+ # (migration_id,)
154
+ # )
155
+ # session.commit()
156
+
157
+ # def rollback_migrations(self, target: str = None):
158
+ # """Rollback migrations up to target (or last one if target is None)."""
159
+ # applied = self.get_applied_migrations()
160
+ # available = self._get_available_migrations()
161
+
162
+ # # Determine which migrations to rollback
163
+ # to_rollback = []
164
+ # rollback_all = target == "zero"
165
+
166
+ # for migration_id in reversed(applied):
167
+ # to_rollback.append((migration_id, available[migration_id]))
168
+
169
+ # if not rollback_all and (target == migration_id or target is None):
170
+ # break
171
+
172
+ # # Rollback migrations
173
+ # session = get_session_database()
174
+ # for migration_id, module in to_rollback:
175
+ # print(f"Rolling back migration: {migration_id}")
176
+
177
+ # migration = module()
178
+ # for sql in migration.down():
179
+ # session.execute(sql)
180
+
181
+ # # Remove migration record
182
+ # session.execute(
183
+ # "DELETE FROM migrations WHERE migration_id = %s",
184
+ # (migration_id,)
185
+ # )
186
+ # session.commit()
187
+
188
+ # def _ensure_migrations_table(self):
189
+ # """Ensure migrations table exists."""
190
+ # session = get_session_database()
191
+ # session.execute("""
192
+ # CREATE TABLE IF NOT EXISTS migrations (
193
+ # migration_id VARCHAR(255) PRIMARY KEY,
194
+ # applied_at TIMESTAMP NOT NULL
195
+ # );
196
+ # """)
197
+ # session.commit()
198
+
199
+ # def _get_available_migrations(self) -> Dict[str, Type['Migration']]:
200
+ # """Get available migrations from migrations directory."""
201
+ # migrations = {}
202
+
203
+ # for filename in sorted(os.listdir(self.migrations_dir)):
204
+ # if filename.endswith(".py") and not filename.startswith("__"):
205
+ # migration_id = filename[:-3]
206
+ # module_name = f"{self.migrations_dir}.{migration_id}"
207
+ # module = importlib.import_module(module_name)
208
+
209
+ # for name, obj in inspect.getmembers(module):
210
+ # if (inspect.isclass(obj) and
211
+ # name == migration_id and
212
+ # hasattr(obj, 'up') and
213
+ # hasattr(obj, 'down')):
214
+ # migrations[migration_id] = obj
215
+
216
+ # return migrations
217
+
218
+
219
+ # class Migration:
220
+ # """Base class for database migrations."""
221
+
222
+ # def up(self) -> List[str]:
223
+ # """Return list of SQL statements to apply migration."""
224
+ # raise NotImplementedError
225
+
226
+ # def down(self) -> List[str]:
227
+ # """Return list of SQL statements to rollback migration."""
228
+ # raise NotImplementedError
229
+
230
+
231
+ # def main():
232
+ # parser = argparse.ArgumentParser(description="Database migration tool")
233
+
234
+ # subparsers = parser.add_subparsers(dest="command", help="Commands")
235
+
236
+ # # makemigrations command
237
+ # make_parser = subparsers.add_parser("makemigrations", help="Generate new migration")
238
+ # make_parser.add_argument("name", help="Migration name")
239
+
240
+ # # migrate command
241
+ # migrate_parser = subparsers.add_parser("migrate", help="Apply migrations")
242
+ # migrate_parser.add_argument("--target", help="Target migration (default: latest)")
243
+
244
+ # # rollback command
245
+ # rollback_parser = subparsers.add_parser("rollback", help="Rollback migrations")
246
+ # rollback_parser.add_argument("--target", help="Target migration (default: last applied)")
247
+
248
+ # args = parser.parse_args()
249
+
250
+ # manager = MigrationManager()
251
+
252
+ # if args.command == "makemigrations":
253
+ # manager.generate_migration(args.name)
254
+ # elif args.command == "migrate":
255
+ # manager.apply_migrations(args.target)
256
+ # elif args.command == "rollback":
257
+ # manager.rollback_migrations(args.target)
258
+ # else:
259
+ # parser.print_help()
260
+
261
+
262
+ # if __name__ == "__main__":
263
+ # main()
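The entire migrate.py module ships commented out in 0.3.7, so nothing above is importable yet. If it is enabled in a later release, a concrete migration would presumably implement the up()/down() contract of the commented-out Migration base class; the following self-contained sketch mirrors that contract with a hypothetical table and is not part of the package:

# Hypothetical sketch of the commented-out Migration contract; not part of 0.3.7.
from typing import List


class Migration:
    """Mirrors the commented-out base class: up()/down() return lists of SQL."""

    def up(self) -> List[str]:
        raise NotImplementedError

    def down(self) -> List[str]:
        raise NotImplementedError


class CreateProductTable(Migration):
    def up(self) -> List[str]:
        return ["CREATE TABLE IF NOT EXISTS product (id SERIAL PRIMARY KEY, name VARCHAR(255));"]

    def down(self) -> List[str]:
        return ["DROP TABLE IF EXISTS product CASCADE;"]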
hypern/database/sql/model.py CHANGED
@@ -5,7 +5,7 @@ from hypern.config import context_store
5
5
  from hypern.exceptions import OutOfScopeApplicationException
6
6
  from hypern.hypern import get_session_database
7
7
 
8
- from .field import Field, ForeignKey
8
+ from .field import Field, ForeignKeyField
9
9
  from .query import QuerySet
10
10
 
11
11
 
@@ -77,7 +77,7 @@ class Model(metaclass=MetaModel):
77
77
  fields_sql.append(cls._get_field_sql(name, field))
78
78
  if field.index:
79
79
  indexes_sql.append(cls._get_index_sql(name))
80
- if isinstance(field, ForeignKey):
80
+ if isinstance(field, ForeignKeyField):
81
81
  foreign_keys.append(cls._get_foreign_key_sql(name, field))
82
82
 
83
83
  fields_sql.extend(foreign_keys)
@@ -109,7 +109,8 @@ class Model(metaclass=MetaModel):
109
109
 
110
110
  @classmethod
111
111
  def _get_foreign_key_sql(cls, name, field) -> str:
112
- return f"FOREIGN KEY ({name}) REFERENCES {field.to_model}({field.related_field}) ON DELETE {field.on_delete} ON UPDATE {field.on_update}"
112
+ target_table = field.to_model.__name__.lower() if not isinstance(field.to_model, str) else field.to_model.lower()
113
+ return f"FOREIGN KEY ({name}) REFERENCES {target_table}({field.related_field}) ON DELETE {field.on_delete} ON UPDATE {field.on_update}"
113
114
 
114
115
  def save(self):
115
116
  query_object = QuerySet(self)
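One consequence of the model.py change: the FOREIGN KEY clause is now built from the referenced model's lowercased class name when to_model is a class (falling back to lowercasing the string otherwise). A standalone illustration of the string it produces, using stand-in objects rather than real hypern models:

# Stand-ins only; reproduces the new clause construction for illustration.
class Customer:
    """Plays the role of a referenced Model subclass."""


class _FakeForeignKey:
    to_model = Customer
    related_field = "id"
    on_delete = "CASCADE"
    on_update = "CASCADE"


field = _FakeForeignKey()
target_table = field.to_model.__name__.lower() if not isinstance(field.to_model, str) else field.to_model.lower()
print(
    f"FOREIGN KEY (customer_id) REFERENCES {target_table}({field.related_field}) "
    f"ON DELETE {field.on_delete} ON UPDATE {field.on_update}"
)
# FOREIGN KEY (customer_id) REFERENCES customer(id) ON DELETE CASCADE ON UPDATE CASCADE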
hypern/database/sql/query.py CHANGED
@@ -1,6 +1,6 @@
1
1
  from enum import Enum
2
2
  from typing import Any, Dict, List, Tuple, Union
3
- from hypern.database.sql.field import ForeignKey
3
+ from hypern.database.sql.field import ForeignKeyField
4
4
 
5
5
 
6
6
  class JoinType(Enum):
@@ -553,7 +553,7 @@ class QuerySet:
553
553
  """
554
554
  qs = self.clone()
555
555
  for field in fields:
556
- if field in qs.model._fields and isinstance(qs.model._fields[field], ForeignKey):
556
+ if field in qs.model._fields and isinstance(qs.model._fields[field], ForeignKeyField):
557
557
  qs._selected_related.add(field)
558
558
  return qs
559
559
 
hypern/hypern.cp310-win32.pyd CHANGED (binary file, contents not shown)
hypern/hypern.pyi CHANGED
@@ -7,7 +7,7 @@ from enum import Enum
7
7
  @dataclass
8
8
  class BaseSchemaGenerator:
9
9
  remove_converter: Callable[[str], str]
10
- parse_docstring: Callable[[Callable[..., Any]], str]
10
+ parse_docstring: Callable[..., str]
11
11
 
12
12
  @dataclass
13
13
  class SwaggerUI:
@@ -196,6 +196,7 @@ class Route:
196
196
  path: str
197
197
  function: FunctionInfo
198
198
  method: str
199
+ doc: str | None = None
199
200
 
200
201
  def matches(self, path: str, method: str) -> str: ...
201
202
  def clone_route(self) -> Route: ...
@@ -330,8 +331,5 @@ class DatabaseTransaction:
330
331
  def bulk_change(self, query: str, params: List[List[Any]], batch_size: int) -> int | None: ...
331
332
  def commit(self) -> None: ...
332
333
  def rollback(self) -> None: ...
333
- def __del__(self) -> None: ...
334
- def __enter__(self) -> None: ...
335
- def __exit__(self, _exc_type, _exc_value, _traceback) -> None: ...
336
334
 
337
335
  def get_session_database(context_id: str) -> DatabaseTransaction: ...
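The stub also drops __enter__/__exit__/__del__ from DatabaseTransaction, so as far as the type stub is concerned transactions are now driven explicitly rather than through a with block. A sketch using only the methods still declared above; the context id, SQL, and parameter style are placeholders:

# Placeholder context id and SQL; uses only methods declared in the 0.3.7 stub.
from hypern.hypern import get_session_database

session = get_session_database("ctx-123")
try:
    session.bulk_change(
        "INSERT INTO product (name) VALUES ($1)",
        [["keyboard"], ["mouse"]],
        batch_size=100,
    )
    session.commit()
except Exception:
    session.rollback()
    raise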
hypern/openapi/schemas.py CHANGED
@@ -37,17 +37,15 @@ class SchemaGenerator(BaseSchemaGenerator):
37
37
  def get_schema(self, app) -> dict[str, typing.Any]:
38
38
  schema = dict(self.base_schema)
39
39
  schema.setdefault("paths", {})
40
- endpoints_info = self.get_endpoints(app.router.routes)
41
-
42
- for endpoint in endpoints_info:
43
- parsed = self.parse_docstring(endpoint.func)
40
+ for route in app.router.routes:
41
+ parsed = self.parse_docstring(route.doc)
44
42
 
45
43
  if not parsed:
46
44
  continue
47
45
 
48
- if endpoint.path not in schema["paths"]:
49
- schema["paths"][endpoint.path] = {}
46
+ if route.path not in schema["paths"]:
47
+ schema["paths"][route.path] = {}
50
48
 
51
- schema["paths"][endpoint.path][endpoint.http_method] = orjson.loads(parsed)
49
+ schema["paths"][route.path][route.method.lower()] = orjson.loads(parsed)
52
50
 
53
51
  return schema
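In other words, get_schema no longer introspects endpoint callables; it reads a pre-rendered doc string off each route (set by routing/route.py, diffed below) and files the parsed JSON under the route's path and lowercased method. A stand-in route object makes the expected shape concrete; the real Route type lives in the compiled extension, so this is illustrative only:

# Stand-in route; shows the shape get_schema consumes after this change.
import orjson


class FakeRoute:
    path = "/items"
    method = "GET"
    doc = orjson.dumps({"summary": "List items", "responses": {"200": {"description": "OK"}}}).decode()


schema: dict = {"paths": {}}
route = FakeRoute()
parsed = route.doc  # SchemaGenerator.parse_docstring would normalise/validate this string
if parsed:
    schema["paths"].setdefault(route.path, {})[route.method.lower()] = orjson.loads(parsed)

print(schema["paths"]["/items"]["get"]["summary"])  # List items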
hypern/routing/route.py CHANGED
@@ -224,8 +224,8 @@ class Route:
224
224
  raise ValueError(f"No handler found for route: {self.path}")
225
225
 
226
226
  # Handle functional routes
227
- for h in self.functional_handlers:
228
- router.add_route(route=self.make_internal_route(path=h["path"], handler=h["func"], method=h["method"].upper()))
227
+ for route in self.functional_handlers:
228
+ router.add_route(route=route)
229
229
  if not self.endpoint:
230
230
  return router
231
231
 
@@ -234,9 +234,10 @@ class Route:
234
234
  if name.upper() in self.http_methods:
235
235
  sig = inspect.signature(func)
236
236
  doc = self.swagger_generate(sig, func.__doc__)
237
- self.endpoint.dispatch.__doc__ = doc
238
237
  endpoint_obj = self.endpoint()
239
- router.add_route(route=self.make_internal_route(path="/", handler=endpoint_obj.dispatch, method=name.upper()))
238
+ route = self.make_internal_route(path="/", handler=endpoint_obj.dispatch, method=name.upper())
239
+ route.doc = doc
240
+ router.add_route(route=route)
240
241
  del endpoint_obj # free up memory
241
242
  return router
242
243
 
@@ -250,15 +251,10 @@ class Route:
250
251
  return await dispatch(func, request, inject)
251
252
 
252
253
  sig = inspect.signature(func)
253
- functional_wrapper.__doc__ = self.swagger_generate(sig, func.__doc__)
254
+ route = self.make_internal_route(path=path, handler=functional_wrapper, method=method.upper())
255
+ route.doc = self.swagger_generate(sig, func.__doc__)
254
256
 
255
- self.functional_handlers.append(
256
- {
257
- "path": path,
258
- "method": method,
259
- "func": functional_wrapper,
260
- }
261
- )
257
+ self.functional_handlers.append(route)
262
258
 
263
259
  return decorator
264
260
 
hypern-0.3.7.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: hypern
3
- Version: 0.3.5
3
+ Version: 0.3.7
4
4
  Classifier: Programming Language :: Rust
5
5
  Classifier: Programming Language :: Python :: Implementation :: CPython
6
6
  Classifier: Programming Language :: Python :: Implementation :: PyPy
@@ -26,6 +26,7 @@ Requires-Dist: watchdog ==6.0.0
26
26
  Requires-Dist: jsonschema ==4.23.0
27
27
  Requires-Dist: psutil ==6.1.0
28
28
  Requires-Dist: msgpack ==1.1.0
29
+ Requires-Dist: redis ==5.2.1
29
30
  License-File: LICENSE
30
31
  Summary: A Fast Async Python backend with a Rust runtime.
31
32
  Author-email: Martin Dang <vannghiem848@gmail.com>
hypern-0.3.7.dist-info/RECORD CHANGED
@@ -1,6 +1,6 @@
1
- hypern-0.3.5.dist-info/METADATA,sha256=Lhxf8FMnS1RvE17wHATRz6F1-v0S3IYcXrGmzWkm3Jg,3850
2
- hypern-0.3.5.dist-info/WHEEL,sha256=HVA0wOUnIw3WSu8DET4aOHFt1dH_siSsnV6db_YgIxE,92
3
- hypern-0.3.5.dist-info/licenses/LICENSE,sha256=qbYKAIJLS6jYg5hYncKE7OtWmqOtpVTvKNkwOa0Iwwg,1328
1
+ hypern-0.3.7.dist-info/METADATA,sha256=Sxz9aJX0DPkYH0LaD6PqhN6FWpAd2nVrrm1Iu848bp8,3879
2
+ hypern-0.3.7.dist-info/WHEEL,sha256=HVA0wOUnIw3WSu8DET4aOHFt1dH_siSsnV6db_YgIxE,92
3
+ hypern-0.3.7.dist-info/licenses/LICENSE,sha256=qbYKAIJLS6jYg5hYncKE7OtWmqOtpVTvKNkwOa0Iwwg,1328
4
4
  hypern/application.py,sha256=DCYFtU8e8NhQtmfaXbUfOxR2_Y3fEn-pzce9OOs6S4U,18396
5
5
  hypern/args_parser.py,sha256=zTfLfBoKBvYWxdPjabTfZsCtYF3La3PT0TD8dfLMeM4,2815
6
6
  hypern/auth/authorization.py,sha256=-NprZsI0np889ZN1fp-MiVFrPoMNzUtatBJaCMtkllM,32
@@ -31,10 +31,11 @@ hypern/database/nosql/addons/password.py,sha256=jfZxvWFm6nV9EWpXq5Mj-jpqnl9QbokZ
31
31
  hypern/database/nosql/addons/unicode.py,sha256=LaDpLfdoTcJuASPE-8fqOVD05H_uOx8gOdnyDn5Iu0c,268
32
32
  hypern/database/nosql/addons/__init__.py,sha256=WEtPM8sPHilvga7zxwqvINeTkF0hdcfgPcAnHc4MASE,125
33
33
  hypern/database/nosql/__init__.py,sha256=MH9YvlbRlbBCrQVNOdfTaK-hINwJxbJLmxwY9Mei7I8,644
34
- hypern/database/sql/field.py,sha256=tSs8iaYjy-K6nplJJ-1X4OQddzW76cfBlx9xTrG_NbQ,20073
35
- hypern/database/sql/model.py,sha256=BLRmOlmfn6ibedR9Bv_rHErSruudJ24B9-nDbRHqWm4,3913
36
- hypern/database/sql/query.py,sha256=tQ7Wss2NAIqsAH0M-fT5m9DU_MsiBR0DcoyTbS_aatU,33335
37
- hypern/database/sql/__init__.py,sha256=lCOGNTHaXNSJbuLLIOe2IWWNmX0MFQFPNCl2yytD2Xs,261
34
+ hypern/database/sql/field.py,sha256=gV9u_BvMIoxoDT3_J7sL5XJNa5XFsAO9w324ThwHbNs,9121
35
+ hypern/database/sql/migrate.py,sha256=BTtAs3-iMyMDzIWl6B3rM9sj7XGggLDRjD0h_WgGPtc,9742
36
+ hypern/database/sql/model.py,sha256=C8_rJA1Adw1yPWthjmAGh26hjTBuwwlEdtH45ADxvL0,4044
37
+ hypern/database/sql/query.py,sha256=En19t27zt6iUDQbFgO_wLEWPQCkPeBuH3s37fzlhMVc,33345
38
+ hypern/database/sql/__init__.py,sha256=dbSAz2nP0DPKK4Bb_jJdObSaSYQfgZ8D4U1TJdc4e7c,645
38
39
  hypern/database/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
39
40
  hypern/datastructures.py,sha256=zZGGSP07kPc9KJDf11hX5uYhAyRE-Ck5wezW5QtOVXw,897
40
41
  hypern/enum.py,sha256=KcVziJj7vWvyie0r2rtxhrLzdtkZAsf0DY58oJ4tQl4,360
@@ -49,7 +50,7 @@ hypern/gateway/gateway.py,sha256=26K2qvJUR-0JnN4IlhwvSSt7EYcpYrBVDuzZ1ivQQ34,147
49
50
  hypern/gateway/proxy.py,sha256=w1wcTplDnVrfjn7hb0M0yBVth5TGl88irF-MUYHysQQ,2463
50
51
  hypern/gateway/service.py,sha256=PkRaM08olqM_j_4wRjEJCR8X8ZysAF2WOcfhWjaX2eo,1701
51
52
  hypern/gateway/__init__.py,sha256=TpFWtqnJerW1-jCWq5fjypJcw9Y6ytyrkvkzby1Eg0E,235
52
- hypern/hypern.pyi,sha256=yXaWGPt598gwPN-CT1ARDdwOSqryZCBFuDLQC8gRd1U,9345
53
+ hypern/hypern.pyi,sha256=f0kHWHI4creyAezdPlr-HOX87xqpCyYpu6cFGpbFCe4,9210
53
54
  hypern/i18n/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
54
55
  hypern/logging/logger.py,sha256=WACam_IJiCMXX0hGVKMGSxUQpY4DgAXy7M1dD3q-Z9s,3256
55
56
  hypern/logging/__init__.py,sha256=6eVriyncsJ4J73fGYhoejv9MX7aGTkRezTpPxO4DX1I,52
@@ -61,7 +62,7 @@ hypern/middleware/i18n.py,sha256=jHzVzjTx1nnjbraZtIVOprrnSaeKMxZB8RuSqRp2I4s,16
61
62
  hypern/middleware/limit.py,sha256=eAYARPjqxq8Ue0TCpnxlVRB5hv7hwBF0PxeD-bG6Sl0,8252
62
63
  hypern/middleware/security.py,sha256=fGBSF7n2iKBtDHE2QW4q_sQE4awYgaYxVUFKsDHkMXg,7675
63
64
  hypern/middleware/__init__.py,sha256=V-Gnv-Jf-14BVuA28z7PN7GBVQ9BBiBdab6-QnTPCfY,493
64
- hypern/openapi/schemas.py,sha256=YHfMlPUeP5DzDX5ao3YH8p_25Vvyaf616dh6XDCUZRc,1677
65
+ hypern/openapi/schemas.py,sha256=hsqSPpwsOETQ5NoGiR9Ay0qEp6GxJ2xhh69rzwxx0CY,1598
65
66
  hypern/openapi/swagger.py,sha256=naqUY3rFAEYA1ZLIlmDsMYaol0yIm6TVebdkFa5cMTc,64
66
67
  hypern/openapi/__init__.py,sha256=4rEVD8pa0kdSpsy7ZkJ5JY0Z2XF0NGSKDMwYAd7YZpE,141
67
68
  hypern/processpool.py,sha256=qEsu9WXWc3_Cl0Frn1jGs7jUJho45zck5L5Ww81Vm70,3883
@@ -73,7 +74,7 @@ hypern/routing/dispatcher.py,sha256=NAVjILlEJjYrixJZ4CO4N1CKkuqbk4TGZOjnQNTTEu4,
73
74
  hypern/routing/endpoint.py,sha256=RKVhvqOEGL9IKBXQ3KJgPi9bgJj9gfWC5BdZc5U_atc,1026
74
75
  hypern/routing/parser.py,sha256=0tJVVNwHC3pWDsehwH6SwJv8_gEuDjltVXrNQWbHyrU,3426
75
76
  hypern/routing/queue.py,sha256=NtFBbogU22ddyyX-CuQMip1XFDPZdMCVMIeUCQ-CR6Y,7176
76
- hypern/routing/route.py,sha256=IUnWU5ra-0R9rrRDpxJiwiw7vaEefn-We2dZ4EocJGw,10403
77
+ hypern/routing/route.py,sha256=kan47-UeL-OPwcpp0rEhmBaaum6hN7FUj13Y8pZDEYA,10256
77
78
  hypern/routing/__init__.py,sha256=U4xW5fDRsn03z4cVLT4dJHHGGU6SVxyv2DL86LXodeE,162
78
79
  hypern/scheduler.py,sha256=-k3tW2AGCnHYSthKXk-FOs_SCtWp3yIxQzwzUJMJsbo,67
79
80
  hypern/security.py,sha256=3E86Yp_eOSVa1emUvBrDgoF0Sn6eNX0CfLnt87w5CPI,1773
@@ -85,5 +86,5 @@ hypern/ws/route.py,sha256=fGQ2RC708MPOiiIHPUo8aZ-oK379TTAyQYm4htNA5jM,803
85
86
  hypern/ws/__init__.py,sha256=dhRoRY683_rfPfSPM5qUczfTuyYDeuLOCFxY4hIdKt8,131
86
87
  hypern/ws.py,sha256=F6SA2Z1KVnqTEX8ssvOXqCtudUS4eo30JsiIsvfbHnE,394
87
88
  hypern/__init__.py,sha256=9Ww_aUQ0vJls0tOq7Yw1_TVOCRsa5bHJ-RtnSeComwk,119
88
- hypern/hypern.cp310-win32.pyd,sha256=KYQvjT6DMn0chAqcgr2MpKehAx686dHvUg05oNL4k20,9780736
89
- hypern-0.3.5.dist-info/RECORD,,
89
+ hypern/hypern.cp310-win32.pyd,sha256=wT6oVemqaafBCSwbuTmh-lBTrPH1Hqk89MCgPDFTogw,9790976
90
+ hypern-0.3.7.dist-info/RECORD,,
File without changes