hypern 0.2.0__cp312-none-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hypern/__init__.py +4 -0
- hypern/application.py +412 -0
- hypern/auth/__init__.py +0 -0
- hypern/auth/authorization.py +2 -0
- hypern/background.py +4 -0
- hypern/caching/__init__.py +0 -0
- hypern/caching/base/__init__.py +8 -0
- hypern/caching/base/backend.py +3 -0
- hypern/caching/base/key_maker.py +8 -0
- hypern/caching/cache_manager.py +56 -0
- hypern/caching/cache_tag.py +10 -0
- hypern/caching/custom_key_maker.py +11 -0
- hypern/caching/redis_backend.py +3 -0
- hypern/cli/__init__.py +0 -0
- hypern/cli/commands.py +0 -0
- hypern/config.py +149 -0
- hypern/datastructures.py +40 -0
- hypern/db/__init__.py +0 -0
- hypern/db/nosql/__init__.py +25 -0
- hypern/db/nosql/addons/__init__.py +4 -0
- hypern/db/nosql/addons/color.py +16 -0
- hypern/db/nosql/addons/daterange.py +30 -0
- hypern/db/nosql/addons/encrypted.py +53 -0
- hypern/db/nosql/addons/password.py +134 -0
- hypern/db/nosql/addons/unicode.py +10 -0
- hypern/db/sql/__init__.py +179 -0
- hypern/db/sql/addons/__init__.py +14 -0
- hypern/db/sql/addons/color.py +16 -0
- hypern/db/sql/addons/daterange.py +23 -0
- hypern/db/sql/addons/datetime.py +22 -0
- hypern/db/sql/addons/encrypted.py +58 -0
- hypern/db/sql/addons/password.py +171 -0
- hypern/db/sql/addons/ts_vector.py +46 -0
- hypern/db/sql/addons/unicode.py +15 -0
- hypern/db/sql/repository.py +290 -0
- hypern/enum.py +13 -0
- hypern/exceptions.py +97 -0
- hypern/hypern.cp312-win_amd64.pyd +0 -0
- hypern/hypern.pyi +266 -0
- hypern/i18n/__init__.py +0 -0
- hypern/logging/__init__.py +3 -0
- hypern/logging/logger.py +82 -0
- hypern/middleware/__init__.py +5 -0
- hypern/middleware/base.py +18 -0
- hypern/middleware/cors.py +38 -0
- hypern/middleware/i18n.py +1 -0
- hypern/middleware/limit.py +176 -0
- hypern/openapi/__init__.py +5 -0
- hypern/openapi/schemas.py +53 -0
- hypern/openapi/swagger.py +3 -0
- hypern/processpool.py +106 -0
- hypern/py.typed +0 -0
- hypern/response/__init__.py +3 -0
- hypern/response/response.py +134 -0
- hypern/routing/__init__.py +4 -0
- hypern/routing/dispatcher.py +67 -0
- hypern/routing/endpoint.py +30 -0
- hypern/routing/parser.py +100 -0
- hypern/routing/route.py +284 -0
- hypern/scheduler.py +5 -0
- hypern/security.py +44 -0
- hypern/worker.py +30 -0
- hypern-0.2.0.dist-info/METADATA +127 -0
- hypern-0.2.0.dist-info/RECORD +66 -0
- hypern-0.2.0.dist-info/WHEEL +4 -0
- hypern-0.2.0.dist-info/licenses/LICENSE +24 -0
hypern/datastructures.py
ADDED
@@ -0,0 +1,40 @@
from typing import Optional
from enum import Enum
from pydantic import BaseModel, AnyUrl, EmailStr


class BaseModelWithConfig(BaseModel):
    model_config = {"extra": "allow"}


class Contact(BaseModelWithConfig):
    name: Optional[str] = None
    url: Optional[AnyUrl] = None
    email: Optional[EmailStr] = None


class License(BaseModelWithConfig):
    name: str
    identifier: Optional[str] = None
    url: Optional[AnyUrl] = None


class Info(BaseModelWithConfig):
    title: str
    summary: Optional[str] = None
    description: Optional[str] = None
    contact: Optional[Contact] = None
    license: Optional[License] = None
    version: str


class HTTPMethod(Enum):
    GET = "GET"
    POST = "POST"
    PUT = "PUT"
    DELETE = "DELETE"
    PATCH = "PATCH"
    OPTIONS = "OPTIONS"
    HEAD = "HEAD"
    TRACE = "TRACE"
    CONNECT = "CONNECT"
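The pydantic models above describe OpenAPI-style metadata, and HTTPMethod enumerates the verbs the router accepts. A minimal usage sketch follows; it is not part of the package diff, and all values are placeholders.

# Illustrative only; assumes the models in hypern/datastructures.py shown above.
from hypern.datastructures import Contact, HTTPMethod, Info, License

info = Info(
    title="Example API",
    version="1.0.0",
    contact=Contact(name="Maintainer", email="maintainer@example.com"),
    license=License(name="MIT", identifier="MIT"),
)
print(info.model_dump(exclude_none=True))  # pydantic v2 serialization
print(HTTPMethod.GET.value)                # "GET"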
hypern/db/__init__.py
ADDED
File without changes

hypern/db/nosql/__init__.py
ADDED
@@ -0,0 +1,25 @@
# -*- coding: utf-8 -*-
from typing import List, TypedDict

from uuid import uuid4
from mongoengine import connect


class TypedDictModel(TypedDict):
    host: str
    alias: str


class NoSqlConfig:
    def __init__(self, dbs_config: List[TypedDictModel]):
        self.dbs_config = dbs_config

    def _connect_db(self, db_config: TypedDictModel):
        _alias = db_config.get("alias", str(uuid4()))
        connect(host=db_config["host"], alias=_alias)

    def init_app(self, app):
        self.app = app  # noqa
        # connect
        for db_config in self.dbs_config:
            self._connect_db(db_config)
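NoSqlConfig opens one mongoengine connection per configured entry when init_app is called. A wiring sketch follows; the URI, alias, and app object are placeholders, not taken from the package.

# Illustrative only; connection URI and alias are placeholder values.
from hypern.db.nosql import NoSqlConfig

nosql = NoSqlConfig(
    dbs_config=[{"host": "mongodb://localhost:27017/app", "alias": "default"}]
)
# nosql.init_app(app)  # `app` would be the Hypern application instance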
hypern/db/nosql/addons/color.py
ADDED
@@ -0,0 +1,16 @@
from mongoengine import BaseField
import re


class ColorField(BaseField):
    def validate(self, value):
        color_regex = r"^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$"
        if not re.match(color_regex, value):
            self.error("Invalid color format. Use hexadecimal color codes (e.g., #FF0000)")
        return True

    def to_mongo(self, value):
        return value

    def to_python(self, value):
        return value
hypern/db/nosql/addons/daterange.py
ADDED
@@ -0,0 +1,30 @@
from mongoengine import BaseField
from datetime import datetime


class DateRangeField(BaseField):
    def __init__(self, **kwargs):
        super(DateRangeField, self).__init__(**kwargs)

    def validate(self, value):
        if not isinstance(value, dict) or "start" not in value or "end" not in value:
            self.error('DateRangeField must be a dictionary with "start" and "end" keys')
        # Use get to safely access keys
        start = value.get("start")
        end = value.get("end")
        # Check if both "start" and "end" are present
        if start is None or end is None:
            self.error('DateRangeField must contain both "start" and "end" keys')

        # Check if "start" and "end" are datetime objects
        if not isinstance(value["start"], datetime) or not isinstance(value["end"], datetime):
            self.error('DateRangeField "start" and "end" must be datetime objects')
        if value["start"] > value["end"]:
            self.error('DateRangeField "start" must be earlier than "end"')
        return True

    def to_mongo(self, value):
        return value

    def to_python(self, value):
        return value
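Both mongoengine addon fields above validate on save: ColorField expects a hex color string and DateRangeField expects a dict with datetime "start" and "end" values. A declaration sketch follows; the document and field names are placeholders, not part of the package.

# Illustrative only; document/field names are placeholders.
from datetime import datetime

from mongoengine import Document, StringField

from hypern.db.nosql.addons.color import ColorField
from hypern.db.nosql.addons.daterange import DateRangeField


class Campaign(Document):
    name = StringField(required=True)
    theme_color = ColorField()        # expects "#RGB" or "#RRGGBB"
    active_period = DateRangeField()  # expects {"start": datetime, "end": datetime}


campaign = Campaign(
    name="Launch",
    theme_color="#FF0000",
    active_period={"start": datetime(2024, 1, 1), "end": datetime(2024, 3, 1)},
)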
hypern/db/nosql/addons/encrypted.py
ADDED
@@ -0,0 +1,53 @@
import os
from typing import Any, Optional
from mongoengine.base import BaseField

from cryptography.hazmat.primitives import padding

from hypern.security import EDEngine, AESEngine


class EncryptedField(BaseField):
    """
    A custom MongoEngine field that encrypts data using AES-256-CBC.

    The field automatically handles encryption when saving to MongoDB and
    decryption when retrieving data.

    Attributes:
        engine: Encryption engine to use. If not provided, will use AES-256-CBC
    """

    def __init__(self, engine: Optional[EDEngine] = None, **kwargs):
        if not engine:
            key = os.urandom(32)
            iv = os.urandom(16)
            padding_class = padding.PKCS7
            self.engine = AESEngine(secret_key=key, iv=iv, padding_class=padding_class)
        else:
            self.engine = engine  # type: ignore
        super(EncryptedField, self).__init__(**kwargs)

    def to_mongo(self, value: Any) -> Optional[str]:
        """Convert a Python object to a MongoDB-compatible format."""
        if value is None:
            return None
        return self.engine.encrypt(value)

    def to_python(self, value: Optional[str]) -> Optional[str]:
        """Convert a MongoDB-compatible format to a Python object."""
        if value is None:
            return None
        if isinstance(value, bytes):
            return self.engine.decrypt(value)
        return value

    def prepare_query_value(self, op, value: Any) -> Optional[str]:
        """Prepare a value used in a query."""
        if value is None:
            return None

        if op in ("set", "upsert"):
            return self.to_mongo(value)

        return value
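When no engine is passed, EncryptedField generates a fresh random key and IV per field instance, so values stored by a previous process cannot be decrypted after a restart. A sketch of declaring the field with an explicit engine follows; the key material and document name are placeholders and would normally come from configuration.

# Illustrative only; key/iv are placeholders, normally loaded from config.
import os

from cryptography.hazmat.primitives import padding
from mongoengine import Document, StringField

from hypern.db.nosql.addons.encrypted import EncryptedField
from hypern.security import AESEngine

engine = AESEngine(secret_key=os.urandom(32), iv=os.urandom(16), padding_class=padding.PKCS7)


class ApiCredential(Document):
    name = StringField(required=True)
    secret = EncryptedField(engine=engine)  # encrypted in to_mongo, decrypted in to_python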
hypern/db/nosql/addons/password.py
ADDED
@@ -0,0 +1,134 @@
from mongoengine.base import BaseField
import weakref
from passlib.context import CryptContext
import re
from typing import Optional, Any


class PasswordField(BaseField):
    """
    A custom password field using passlib for hashing and weakref for reference management.
    Supports multiple hashing schemes and automatic upgrade of hash algorithms.
    """

    # Class-level password context - shared across all instances
    pwd_context = CryptContext(
        # List of hashing schemes in order of preference
        schemes=["argon2", "pbkdf2_sha256", "bcrypt_sha256"],
        # Mark argon2 as default
        default="argon2",
        # Argon2 parameters
        argon2__rounds=4,
        argon2__memory_cost=65536,
        argon2__parallelism=2,
        # PBKDF2 parameters
        pbkdf2_sha256__rounds=29000,
    )

    def __init__(
        self,
        min_length: int = 8,
        require_number: bool = False,
        require_special: bool = False,
        require_uppercase: bool = False,
        require_lowercase: bool = False,
        **kwargs,
    ):
        """
        Initialize the password field with validation rules.

        Args:
            min_length: Minimum password length
            require_number: Require at least one number
            require_special: Require at least one special character
            require_uppercase: Require at least one uppercase letter
            require_lowercase: Require at least one lowercase letter
        """
        self.min_length = min_length
        self.require_number = require_number
        self.require_special = require_special
        self.require_uppercase = require_uppercase
        self.require_lowercase = require_lowercase

        # Use weakref to store references to parent documents
        self.instances = weakref.WeakKeyDictionary()

        kwargs["required"] = True
        super(PasswordField, self).__init__(**kwargs)

    def validate_password(self, password: str) -> tuple[bool, str]:
        """Validate password strength."""

        if len(password) < self.min_length:
            return False, f"Password must be at least {self.min_length} characters long"

        if self.require_number and not re.search(r"\d", password):
            return False, "Password must contain at least one number"

        if self.require_special and not re.search(r"[!@#$%^&*(),.?\":{}|<>]", password):
            return False, "Password must contain at least one special character"

        if self.require_uppercase and not re.search(r"[A-Z]", password):
            return False, "Password must contain at least one uppercase letter"

        if self.require_lowercase and not re.search(r"[a-z]", password):
            return False, "Password must contain at least one lowercase letter"

        return True, ""

    def hash_password(self, password: str) -> str:
        """Hash password using the configured passlib context."""
        return self.pwd_context.hash(password)

    def verify_password(self, password: str, hash: str) -> tuple[bool, Optional[str]]:
        """
        Verify password and return tuple of (is_valid, new_hash).
        new_hash is provided if the hash needs to be upgraded.
        """
        try:
            is_valid = self.pwd_context.verify(password, hash)
            # Check if the hash needs to be upgraded
            if is_valid and self.pwd_context.needs_update(hash):
                return True, self.hash_password(password)
            return is_valid, None
        except Exception:
            return False, None

    def __get__(self, instance, owner):
        """Custom getter using weakref."""
        if instance is None:
            return self
        return self.instances.get(instance)

    def __set__(self, instance, value):
        """Custom setter using weakref."""
        if value and isinstance(value, str):
            # Validate and hash new password
            is_valid, error = self.validate_password(value)
            if not is_valid:
                raise ValueError(error)
            hashed = self.hash_password(value)
            self.instances[instance] = hashed
            instance._data[self.name] = hashed
        else:
            # If it's already hashed or None
            self.instances[instance] = value
            instance._data[self.name] = value

    def to_mongo(self, value: str) -> Optional[str]:
        """Convert to MongoDB-compatible value."""
        if value is None:
            return None
        return self.hash_password(value)

    def to_python(self, value: str) -> str:
        """Convert from MongoDB to Python."""
        return value

    def prepare_query_value(self, op, value: Any) -> Optional[str]:
        """Prepare value for database operations."""
        if value is None:
            return None
        if op == "exact":
            return self.hash_password(value)
        return value
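A usage sketch of PasswordField: assignment validates and hashes the plaintext, and verify_password checks a candidate against the stored hash, returning a replacement hash when the configured scheme has changed. The document, field names, and password below are placeholders, not part of the package.

# Illustrative only; document/field names and the password are placeholders.
from mongoengine import Document, StringField

from hypern.db.nosql.addons.password import PasswordField


class User(Document):
    username = StringField(required=True)
    password = PasswordField(min_length=10, require_number=True, require_uppercase=True)


user = User(username="alice")
user.password = "Sup3rSecretPass"  # validated, then hashed with argon2 by __set__

# Accessing the descriptor on the class returns the field itself.
is_valid, new_hash = User.password.verify_password("Sup3rSecretPass", user.password)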
hypern/db/sql/__init__.py
ADDED
@@ -0,0 +1,179 @@
# -*- coding: utf-8 -*-
import asyncio
import threading
import traceback
from contextlib import asynccontextmanager
from contextvars import ContextVar, Token
from datetime import datetime
from typing import Dict, Optional, Union
from uuid import uuid4

from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_scoped_session
from sqlalchemy.orm import Session, sessionmaker
from sqlalchemy.sql.expression import Delete, Insert, Update

from hypern.hypern import Request, Response

from .repository import Model, PostgresRepository


class ContextStore:
    def __init__(self, cleanup_interval: int = 300, max_age: int = 3600):
        """
        Initialize ContextStore with automatic session cleanup.

        :param cleanup_interval: Interval between cleanup checks (in seconds)
        :param max_age: Maximum age of a session before it's considered expired (in seconds)
        """
        self._session_times: Dict[str, datetime] = {}
        self.session_var = ContextVar("session_id", default=None)

        self._max_age = max_age
        self._cleanup_interval = cleanup_interval
        self._cleanup_thread: Optional[threading.Thread] = None
        self._stop_event = threading.Event()

        # Start the cleanup thread
        self._start_cleanup_thread()

    def _start_cleanup_thread(self):
        """Start a background thread for periodic session cleanup."""

        def cleanup_worker():
            while not self._stop_event.is_set():
                self._perform_cleanup()
                self._stop_event.wait(self._cleanup_interval)

        self._cleanup_thread = threading.Thread(
            target=cleanup_worker,
            daemon=True,  # Allows the thread to be automatically terminated when the main program exits
        )
        self._cleanup_thread.start()

    def _perform_cleanup(self):
        """Perform cleanup of expired sessions."""
        current_time = datetime.now()
        expired_sessions = [
            session_id for session_id, timestamp in list(self._session_times.items()) if (current_time - timestamp).total_seconds() > self._max_age
        ]

        for session_id in expired_sessions:
            self.remove_session(session_id)

    def remove_session(self, session_id: str):
        """Remove a specific session."""
        self._session_times.pop(session_id, None)

    def set_context(self, session_id: str):
        """
        Context manager for setting and resetting session context.

        :param session_id: Unique identifier for the session
        :return: Context manager for session
        """
        self.session_var.set(session_id)
        self._session_times[session_id] = datetime.now()

    def get_context(self) -> str:
        """
        Get the current session context.

        :return: Current session ID
        :raises RuntimeError: If no session context is available
        """
        return self.session_var.get()

    def reset_context(self):
        """Reset the session context."""
        token = self.get_context()
        if token is not None:
            self.session_var.reset(token)

    def stop_cleanup(self):
        """
        Stop the cleanup thread.
        Useful for graceful shutdown of the application.
        """
        self._stop_event.set()
        if self._cleanup_thread:
            self._cleanup_thread.join()

    def __del__(self):
        """
        Ensure cleanup thread is stopped when the object is deleted.
        """
        self.stop_cleanup()


class SqlConfig:
    def __init__(self, default_engine: AsyncEngine | None = None, reader_engine: AsyncEngine | None = None, writer_engine: AsyncEngine | None = None):
        """
        Initialize the SQL configuration.
        You can provide a default engine, a reader engine, and a writer engine.
        If only one engine is provided (default_engine), it will be used for both reading and writing.
        If both reader and writer engines are provided, they will be used for reading and writing respectively.
        Note: The reader and writer engines must be different.
        """

        assert default_engine or reader_engine or writer_engine, "At least one engine must be provided."
        assert not (reader_engine and writer_engine and id(reader_engine) == id(writer_engine)), "Reader and writer engines must be different."

        engines = {
            "writer": writer_engine or default_engine,
            "reader": reader_engine or default_engine,
        }
        self.session_store = ContextStore()

        class RoutingSession(Session):
            def get_bind(this, mapper=None, clause=None, **kwargs):
                if this._flushing or isinstance(clause, (Update, Delete, Insert)):
                    return engines["writer"].sync_engine
                return engines["reader"].sync_engine

        async_session_factory = sessionmaker(
            class_=AsyncSession,
            sync_session_class=RoutingSession,
            expire_on_commit=False,
        )

        session_scope: Union[AsyncSession, async_scoped_session] = async_scoped_session(
            session_factory=async_session_factory,
            scopefunc=asyncio.current_task,
        )

        @asynccontextmanager
        async def get_session():
            """
            Get the database session.
            This can be used for dependency injection.

            :return: The database session.
            """
            try:
                yield session_scope
            except Exception:
                traceback.print_exc()
                await session_scope.rollback()
            finally:
                await session_scope.remove()
                await session_scope.close()

        self.get_session = get_session
        self._context_token: Optional[Token] = None

    def before_request(self, request: Request):
        token = str(uuid4())
        self.session_store.set_context(token)
        return request

    def after_request(self, response: Response):
        self.session_store.reset_context()
        return response

    def init_app(self, app):
        app.inject("get_session", self.get_session)
        app.before_request()(self.before_request)
        app.after_request()(self.after_request)


__all__ = ["Model", "PostgresRepository", "SqlConfig"]
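SqlConfig builds a task-scoped async session whose flushes and DML statements are routed to the writer engine while reads go to the reader engine, and it exposes that session to handlers through the injected get_session dependency. A wiring sketch follows; the DSNs are placeholders, and a single default_engine would also work.

# Illustrative only; the DSNs are placeholders.
from sqlalchemy.ext.asyncio import create_async_engine

from hypern.db.sql import SqlConfig

sql_config = SqlConfig(
    writer_engine=create_async_engine("postgresql+asyncpg://user:pass@primary/app"),
    reader_engine=create_async_engine("postgresql+asyncpg://user:pass@replica/app"),
)
# sql_config.init_app(app)  # registers get_session plus the before/after request hooks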
hypern/db/sql/addons/__init__.py
ADDED
@@ -0,0 +1,14 @@
# -*- coding: utf-8 -*-
from .ts_vector import TSVector
from .datetime import DatetimeType
from .password import PasswordType
from .encrypted import StringEncryptType, LargeBinaryEncryptType, AESEngine

__all__ = [
    "TSVector",
    "DatetimeType",
    "PasswordType",
    "StringEncryptType",
    "LargeBinaryEncryptType",
    "AESEngine",
]
hypern/db/sql/addons/color.py
ADDED
@@ -0,0 +1,16 @@
import re

from sqlalchemy.types import String, TypeDecorator


class ColorField(TypeDecorator):
    impl = String

    def process_bind_param(self, value, dialect):
        color_regex = r"^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$"
        if not re.match(color_regex, value):
            raise ValueError("Invalid color format. Use hexadecimal color codes (e.g., #FF0000)")
        return value

    def process_result_value(self, value, dialect):
        return value
hypern/db/sql/addons/daterange.py
ADDED
@@ -0,0 +1,23 @@
from datetime import datetime

from sqlalchemy.dialects.postgresql import DATERANGE
from sqlalchemy.types import TypeDecorator


class DateRangeField(TypeDecorator):
    impl = DATERANGE

    def process_bind_param(self, value, dialect):
        if value is None:
            return None
        elif "start" in value and "end" in value:
            return f"['{value['start']}', '{value['end']}']"
        else:
            raise ValueError('DateRangeField must be a dictionary with "start" and "end" keys')

    def process_result_value(self, value, dialect):
        if value is None:
            return None
        else:
            start, end = value[1:-1].split(",")
            return {"start": datetime.strptime(start.strip("'"), "%Y-%m-%d %H:%M:%S.%f"), "end": datetime.strptime(end.strip("'"), "%Y-%m-%d %H:%M:%S.%f")}
hypern/db/sql/addons/datetime.py
ADDED
@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
from sqlalchemy import types


class DatetimeType(types.TypeDecorator):
    impl = types.DateTime
    cache_ok = True

    def load_dialect_impl(self, dialect):
        if dialect.name == "sqlite":
            return dialect.type_descriptor(types.TEXT)
        return dialect.type_descriptor(self.impl)

    def process_bind_param(self, value, dialect):
        if dialect.name == "sqlite":
            return value.isoformat()
        return value

    def process_result_value(self, value, dialect):
        if dialect.name != "sqlite":
            return value.timestamp()
        return value
hypern/db/sql/addons/encrypted.py
ADDED
@@ -0,0 +1,58 @@
# -*- coding: utf-8 -*-
import os
import typing

from cryptography.hazmat.primitives import padding
from sqlalchemy.types import LargeBinary, String, TypeDecorator

from hypern.security import AESEngine, EDEngine


class StringEncryptType(TypeDecorator):
    impl = String
    cache_ok = True

    def __init__(self, engine: typing.Optional[EDEngine] = None, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)

        if not engine:
            key = os.urandom(32)
            iv = os.urandom(16)
            padding_class = padding.PKCS7
            self.engine = AESEngine(secret_key=key, iv=iv, padding_class=padding_class)
        else:
            self.engine = engine  # type: ignore

    def process_bind_param(self, value, dialect):
        if value is None:
            return value
        if not isinstance(value, str):
            raise ValueError("Value String Encrypt Type must be a string")
        return self.engine.encrypt(value).decode(encoding="utf-8")

    def process_result_value(self, value, dialect):
        if value is None:
            return value
        return self.engine.decrypt(value)


class LargeBinaryEncryptType(StringEncryptType):
    impl = LargeBinary
    cache_ok = True

    def __init__(self, engine: typing.Optional[EDEngine] = None, *args, **kwargs) -> None:
        super().__init__(engine=engine, *args, **kwargs)  # type: ignore

    def process_bind_param(self, value, dialect):
        if value is None:
            return value
        value = super().process_bind_param(value, dialect)
        if isinstance(value, str):
            return value.encode("utf-8")
        return value

    def process_result_value(self, value, dialect):
        if isinstance(value, bytes):
            value = value.decode("utf-8")
            return super().process_result_value(value, dialect)
        return value