hypern-0.3.11-cp312-cp312-musllinux_1_2_armv7l.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. hypern/__init__.py +24 -0
  2. hypern/application.py +495 -0
  3. hypern/args_parser.py +73 -0
  4. hypern/auth/__init__.py +0 -0
  5. hypern/auth/authorization.py +2 -0
  6. hypern/background.py +4 -0
  7. hypern/caching/__init__.py +6 -0
  8. hypern/caching/backend.py +31 -0
  9. hypern/caching/redis_backend.py +201 -0
  10. hypern/caching/strategies.py +208 -0
  11. hypern/cli/__init__.py +0 -0
  12. hypern/cli/commands.py +0 -0
  13. hypern/config.py +246 -0
  14. hypern/database/__init__.py +0 -0
  15. hypern/database/sqlalchemy/__init__.py +4 -0
  16. hypern/database/sqlalchemy/config.py +66 -0
  17. hypern/database/sqlalchemy/repository.py +290 -0
  18. hypern/database/sqlx/__init__.py +36 -0
  19. hypern/database/sqlx/field.py +246 -0
  20. hypern/database/sqlx/migrate.py +263 -0
  21. hypern/database/sqlx/model.py +117 -0
  22. hypern/database/sqlx/query.py +904 -0
  23. hypern/datastructures.py +40 -0
  24. hypern/enum.py +13 -0
  25. hypern/exceptions/__init__.py +34 -0
  26. hypern/exceptions/base.py +62 -0
  27. hypern/exceptions/common.py +12 -0
  28. hypern/exceptions/errors.py +15 -0
  29. hypern/exceptions/formatters.py +56 -0
  30. hypern/exceptions/http.py +76 -0
  31. hypern/gateway/__init__.py +6 -0
  32. hypern/gateway/aggregator.py +32 -0
  33. hypern/gateway/gateway.py +41 -0
  34. hypern/gateway/proxy.py +60 -0
  35. hypern/gateway/service.py +52 -0
  36. hypern/hypern.cpython-312-arm-linux-musleabihf.so +0 -0
  37. hypern/hypern.pyi +333 -0
  38. hypern/i18n/__init__.py +0 -0
  39. hypern/logging/__init__.py +3 -0
  40. hypern/logging/logger.py +82 -0
  41. hypern/middleware/__init__.py +17 -0
  42. hypern/middleware/base.py +13 -0
  43. hypern/middleware/cache.py +177 -0
  44. hypern/middleware/compress.py +78 -0
  45. hypern/middleware/cors.py +41 -0
  46. hypern/middleware/i18n.py +1 -0
  47. hypern/middleware/limit.py +177 -0
  48. hypern/middleware/security.py +184 -0
  49. hypern/openapi/__init__.py +5 -0
  50. hypern/openapi/schemas.py +51 -0
  51. hypern/openapi/swagger.py +3 -0
  52. hypern/processpool.py +139 -0
  53. hypern/py.typed +0 -0
  54. hypern/reload.py +46 -0
  55. hypern/response/__init__.py +3 -0
  56. hypern/response/response.py +142 -0
  57. hypern/routing/__init__.py +5 -0
  58. hypern/routing/dispatcher.py +70 -0
  59. hypern/routing/endpoint.py +30 -0
  60. hypern/routing/parser.py +98 -0
  61. hypern/routing/queue.py +175 -0
  62. hypern/routing/route.py +280 -0
  63. hypern/scheduler.py +5 -0
  64. hypern/worker.py +274 -0
  65. hypern/ws/__init__.py +4 -0
  66. hypern/ws/channel.py +80 -0
  67. hypern/ws/heartbeat.py +74 -0
  68. hypern/ws/room.py +76 -0
  69. hypern/ws/route.py +26 -0
  70. hypern-0.3.11.dist-info/METADATA +134 -0
  71. hypern-0.3.11.dist-info/RECORD +74 -0
  72. hypern-0.3.11.dist-info/WHEEL +4 -0
  73. hypern-0.3.11.dist-info/licenses/LICENSE +24 -0
  74. hypern.libs/libgcc_s-5b5488a6.so.1 +0 -0
hypern/middleware/compress.py ADDED
@@ -0,0 +1,78 @@
+ import gzip
+ import zlib
+ from typing import List, Optional
+
+ from hypern.hypern import Request, Response
+
+ from .base import Middleware, MiddlewareConfig
+
+
+ class CompressionMiddleware(Middleware):
+     """
+     Middleware for compressing response content using gzip or deflate encoding.
+     """
+
+     def __init__(
+         self, config: Optional[MiddlewareConfig] = None, min_size: int = 500, compression_level: int = 6, include_types: Optional[List[str]] = None
+     ) -> None:
+         """
+         Initialize compression middleware.
+
+         Args:
+             min_size: Minimum response size in bytes to trigger compression
+             compression_level: Compression level (1-9, higher = better compression but slower)
+             include_types: List of content types to compress (defaults to common text types)
+         """
+         super().__init__(config)
+         self.min_size = min_size
+         self.compression_level = compression_level
+         self.include_types = include_types or [
+             "text/plain",
+             "text/html",
+             "text/css",
+             "text/javascript",
+             "application/javascript",
+             "application/json",
+             "application/xml",
+             "application/x-yaml",
+         ]
+
+     def before_request(self, request: Request) -> Request:
+         return request
+
+     def after_request(self, response: Response) -> Response:
+         # Check if response should be compressed
+         content_type = (response.headers.get("content-type") or "").split(";")[0].lower()
+         content_encoding = (response.headers.get("content-encoding") or "").lower()
+
+         # Skip if:
+         # - Content is already encoded
+         # - Content type is not in include list
+         # - Content length is below minimum size
+         if content_encoding or content_type not in self.include_types or len(response.description.encode()) < self.min_size:
+             return response
+
+         # Get accepted encodings from request
+         accept_encoding = (response.headers.get("accept-encoding") or "").lower()
+
+         if "gzip" in accept_encoding:
+             # Use gzip compression
+             response.description = gzip.compress(
+                 response.description if isinstance(response.description, bytes) else str(response.description).encode(), compresslevel=self.compression_level
+             )
+             response.headers.set("content-encoding", "gzip")
+
+         elif "deflate" in accept_encoding:
+             # Use deflate compression
+             response.description = zlib.compress(
+                 response.description if isinstance(response.description, bytes) else str(response.description).encode(), level=self.compression_level
+             )
+             response.headers.set("content-encoding", "deflate")
+
+         # Update content length after compression
+         response.headers.set("content-length", str(len(response.description)))
+
+         # Add Vary header to indicate content varies by Accept-Encoding
+         response.headers.set("vary", "Accept-Encoding")
+
+         return response
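For reference, a minimal standalone sketch of the size-threshold and encoding-negotiation rules used above, runnable without hypern (the Accept-Encoding value and sample body are made up for illustration):

import gzip
import zlib

def compress_body(body: bytes, accept_encoding: str, min_size: int = 500, level: int = 6):
    """Return (payload, encoding) following the same rules as CompressionMiddleware."""
    if len(body) < min_size:
        return body, None  # too small to be worth compressing
    if "gzip" in accept_encoding:
        return gzip.compress(body, compresslevel=level), "gzip"
    if "deflate" in accept_encoding:
        return zlib.compress(body, level=level), "deflate"
    return body, None

payload, encoding = compress_body(b"{}" * 400, "gzip, deflate, br")
print(encoding, len(payload))  # gzip, followed by the compressed size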
hypern/middleware/cors.py ADDED
@@ -0,0 +1,41 @@
+ from typing import List, Optional
+ from .base import Middleware
+ from hypern.hypern import MiddlewareConfig
+
+
+ class CORSMiddleware(Middleware):
+     """
+     The `CORSMiddleware` class is used to add CORS headers to the response based on specified origins,
+     methods, and headers.
+     """
+
+     def __init__(
+         self, config: Optional[MiddlewareConfig] = None, allow_origins: List[str] = None, allow_methods: List[str] = None, allow_headers: List[str] = None
+     ) -> None:
+         super().__init__(config)
+         self.allow_origins = allow_origins or []
+         self.allow_methods = allow_methods or []
+         self.allow_headers = allow_headers or []
+
+     def before_request(self, request):
+         return request
+
+     def after_request(self, response):
+         """
+         The `after_request` function adds Access-Control headers to the response based on specified origins,
+         methods, and headers.
+
+         :param response: The `after_request` method is used to add CORS (Cross-Origin Resource Sharing)
+         headers to the response object before sending it back to the client. The parameters used in this
+         method are:
+         :return: The `response` object is being returned from the `after_request` method.
+         """
+         for origin in self.allow_origins:
+             self.app.add_response_header("Access-Control-Allow-Origin", origin)
+         self.app.add_response_header(
+             "Access-Control-Allow-Methods",
+             ", ".join([method.upper() for method in self.allow_methods]),
+         )
+         self.app.add_response_header("Access-Control-Allow-Headers", ", ".join(self.allow_headers))
+         self.app.add_response_header("Access-Control-Allow-Credentials", "true")
+         return response
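With a hypothetical configuration such as the one below (values are illustrative, not from the package), the loop in after_request emits one Access-Control-Allow-Origin header per configured origin, plus the joined method and header lists:

allow_origins = ["https://app.example.com", "https://admin.example.com"]
allow_methods = ["get", "post", "options"]
allow_headers = ["Content-Type", "Authorization"]

for origin in allow_origins:
    print("Access-Control-Allow-Origin:", origin)
print("Access-Control-Allow-Methods:", ", ".join(m.upper() for m in allow_methods))
print("Access-Control-Allow-Headers:", ", ".join(allow_headers))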
hypern/middleware/i18n.py ADDED
@@ -0,0 +1 @@
+ # coming soon
hypern/middleware/limit.py ADDED
@@ -0,0 +1,177 @@
+ from typing import Optional
+ import time
+ from abc import ABC, abstractmethod
+ from threading import Lock
+
+ from hypern.hypern import Request, Response
+
+ from .base import Middleware, MiddlewareConfig
+
+
+ class StorageBackend(ABC):
+     @abstractmethod
+     def increment(self, key, amount=1, expire=None):
+         pass
+
+     @abstractmethod
+     def get(self, key):
+         pass
+
+
+ class RedisBackend(StorageBackend):
+     def __init__(self, redis_client):
+         self.redis = redis_client
+
+     def increment(self, key, amount=1, expire=None):
+         """
+         The `increment` function increments a value in Redis by a specified amount and optionally sets an
+         expiration time for the key.
+
+         :param key: The `key` parameter in the `increment` method is used to specify the key in the Redis
+         database that you want to increment
+         :param amount: The `amount` parameter in the `increment` method specifies the value by which the
+         key's current value should be incremented. By default, it is set to 1, meaning that if no specific
+         amount is provided, the key's value will be incremented by 1, defaults to 1 (optional)
+         :param expire: The `expire` parameter in the `increment` method is used to specify the expiration
+         time for the key in Redis. If a value is provided for `expire`, the key will expire after the
+         specified number of seconds. If `expire` is not provided (i.e., it is `None`
+         :return: The `increment` method returns the result of incrementing the value of the key by the
+         specified amount. If an expiration time is provided, it also sets the expiration time for the key in
+         Redis. The method returns the updated value of the key after the increment operation.
+         """
+         with self.redis.pipeline() as pipe:
+             pipe.incr(key, amount)
+             if expire:
+                 pipe.expire(key, int(expire))
+             return pipe.execute()[0]
+
+     def get(self, key):
+         return int(self.redis.get(key) or 0)
+
+
+ class InMemoryBackend(StorageBackend):
+     def __init__(self):
+         self.storage = {}
+
+     def increment(self, key, amount=1, expire=None):
+         """
+         The `increment` function updates the value associated with a key in a storage dictionary by a
+         specified amount and optionally sets an expiration time.
+
+         :param key: The `key` parameter in the `increment` method is used to identify the value that needs
+         to be incremented in the storage. It serves as a unique identifier for the value being manipulated
+         :param amount: The `amount` parameter in the `increment` method specifies the value by which the
+         existing value associated with the given `key` should be incremented. By default, if no `amount` is
+         provided, it will increment the value by 1, defaults to 1 (optional)
+         :param expire: The `expire` parameter in the `increment` method is used to specify the expiration
+         time for the key-value pair being incremented. If a value is provided for the `expire` parameter, it
+         sets the expiration time for the key in the storage dictionary to the current time plus the
+         specified expiration duration
+         :return: The function `increment` returns the updated value of the key in the storage after
+         incrementing it by the specified amount.
+         """
+         if key not in self.storage:
+             self.storage[key] = {"value": 0, "expire": None}
+         self.storage[key]["value"] += amount
+         if expire:
+             self.storage[key]["expire"] = time.time() + expire
+         return self.storage[key]["value"]
+
+     def get(self, key):
+         """
+         This Python function retrieves the value associated with a given key from a storage dictionary,
+         checking for expiration before returning the value or 0 if the key is not found.
+
+         :param key: The `key` parameter is used to specify the key of the item you want to retrieve from the
+         storage. The function checks if the key exists in the storage dictionary and returns the
+         corresponding value if it does. If the key has an expiration time set and it has expired, the
+         function deletes the key
+         :return: The `get` method returns the value associated with the given key if the key is present in
+         the storage and has not expired. If the key is not found or has expired, it returns 0.
+         """
+         if key in self.storage:
+             if self.storage[key]["expire"] and time.time() > self.storage[key]["expire"]:
+                 del self.storage[key]
+                 return 0
+             return self.storage[key]["value"]
+         return 0
+
+
+ class RateLimitMiddleware(Middleware):
+     """
+     The RateLimitMiddleware class implements rate limiting functionality to restrict the number of
+     requests per minute for a given IP address.
+     """
+
+     def __init__(self, storage_backend, config: Optional[MiddlewareConfig] = None, requests_per_minute=60, window_size=60):
+         super().__init__(config)
+         self.storage = storage_backend
+         self.requests_per_minute = requests_per_minute
+         self.window_size = window_size
+
+     def get_request_identifier(self, request: Request):
+         return request.remote_addr
+
+     def before_request(self, request: Request):
+         """
+         The `before_request` function checks the request rate limit and returns a 429 status code if the
+         limit is exceeded.
+
+         :param request: The `request` parameter in the `before_request` method is of type `Request`. It
+         is used to represent an incoming HTTP request that the server will process
+         :type request: Request
+         :return: The code snippet is a method called `before_request` that takes in a `Request` object
+         as a parameter.
+         """
+         identifier = self.get_request_identifier(request)
+         current_time = int(time.time())
+         window_key = f"{identifier}:{current_time // self.window_size}"
+
+         request_count = self.storage.increment(window_key, expire=self.window_size)
+
+         if request_count > self.requests_per_minute:
+             return Response(status_code=429, description=b"Too Many Requests", headers={"Retry-After": str(self.window_size)})
+
+         return request
+
+     def after_request(self, response):
+         return response
+
+
+ class ConcurrentRequestMiddleware(Middleware):
+     # The `ConcurrentRequestMiddleware` class limits the number of concurrent requests and returns a 429
+     # status code with a Retry-After header if the limit is reached.
+     def __init__(self, max_concurrent_requests=100):
+         super().__init__()
+         self.max_concurrent_requests = max_concurrent_requests
+         self.current_requests = 0
+         self.lock = Lock()
+
+     def get_request_identifier(self, request):
+         return request.remote_addr
+
+     def before_request(self, request):
+         """
+         The `before_request` function limits the number of concurrent requests and returns a 429 status code
+         with a Retry-After header if the limit is reached.
+
+         :param request: The `before_request` method in the code snippet is a method that is called before
+         processing each incoming request. It checks if the number of current requests is within the allowed
+         limit (`max_concurrent_requests`). If the limit is exceeded, it returns a 429 status code with a
+         "Too Many Requests
+         :return: the `request` object after checking if the number of current requests is within the allowed
+         limit. If the limit is exceeded, it returns a 429 status code response with a "Too Many Requests"
+         description and a "Retry-After" header set to 5.
+         """
+
+         with self.lock:
+             if self.current_requests >= self.max_concurrent_requests:
+                 return Response(status_code=429, description="Too Many Requests", headers={"Retry-After": "5"})
+             self.current_requests += 1
+
+         return request
+
+     def after_request(self, response):
+         with self.lock:
+             self.current_requests -= 1
+         return response
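The fixed-window counting in RateLimitMiddleware.before_request pairs a client identifier with the current window index. A standalone sketch of the same behaviour using a plain dict instead of a StorageBackend (identifier and limit values are illustrative):

import time

storage = {}

def hit(identifier: str, window_size: int = 60, limit: int = 60) -> bool:
    """Return True while the request is allowed, False once the per-window limit is reached."""
    window_key = f"{identifier}:{int(time.time()) // window_size}"
    count = storage.get(window_key, 0) + 1
    storage[window_key] = count
    return count <= limit

allowed = [hit("203.0.113.7", limit=3) for _ in range(5)]
print(allowed)  # [True, True, True, False, False]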
hypern/middleware/security.py ADDED
@@ -0,0 +1,184 @@
+ import hashlib
+ import hmac
+ import secrets
+ import time
+ from base64 import b64decode, b64encode
+ from dataclasses import dataclass
+ from datetime import datetime, timedelta, timezone
+ from typing import Any, Dict, List, Optional
+
+ import jwt
+
+ from hypern.exceptions import ForbiddenException, UnauthorizedException
+ from hypern.hypern import Request, Response
+ from .base import Middleware, MiddlewareConfig
+
+
+ @dataclass
+ class CORSConfig:
+     allowed_origins: List[str]
+     allowed_methods: List[str]
+     max_age: int
+
+
+ @dataclass
+ class SecurityConfig:
+     rate_limiting: bool = False
+     jwt_auth: bool = False
+     cors_configuration: Optional[CORSConfig] = None
+     csrf_protection: bool = False
+     security_headers: Optional[Dict[str, str]] = None
+     jwt_secret: str = ""
+     jwt_algorithm: str = "HS256"
+     jwt_expires_in: int = 3600  # 1 hour in seconds
+
+     def __post_init__(self):
+         if self.cors_configuration:
+             self.cors_configuration = CORSConfig(**self.cors_configuration)
+
+         if self.security_headers is None:
+             self.security_headers = {
+                 "X-Frame-Options": "DENY",
+                 "X-Content-Type-Options": "nosniff",
+                 "Strict-Transport-Security": "max-age=31536000; includeSubDomains",
+             }
+
+
+ class SecurityMiddleware(Middleware):
+     def __init__(self, secur_config: SecurityConfig, config: Optional[MiddlewareConfig] = None):
+         super().__init__(config)
+         self.secur_config = secur_config
+         self._secret_key = secrets.token_bytes(32)
+         self._token_lifetime = 3600
+         self._rate_limit_storage = {}
+
+     def _rate_limit_check(self, request: Request) -> Optional[Response]:
+         """Check if the request exceeds rate limits"""
+         if not self.secur_config.rate_limiting:
+             return None
+
+         client_ip = request.client.host
+         current_time = time.time()
+         window_start = int(current_time - 60)  # 1-minute window
+
+         # Clean up old entries
+         self._rate_limit_storage = {ip: hits for ip, hits in self._rate_limit_storage.items() if hits["timestamp"] > window_start}
+
+         if client_ip not in self._rate_limit_storage:
+             self._rate_limit_storage[client_ip] = {"count": 1, "timestamp": current_time}
+         else:
+             self._rate_limit_storage[client_ip]["count"] += 1
+
+         if self._rate_limit_storage[client_ip]["count"] > 60:  # 60 requests per minute
+             return Response(status_code=429, description=b"Too Many Requests", headers={"Retry-After": "60"})
+         return None
+
+     def _generate_jwt_token(self, user_data: Dict[str, Any]) -> str:
+         """Generate a JWT token"""
+         if not self.secur_config.jwt_secret:
+             raise ValueError("JWT secret key is not configured")
+
+         payload = {
+             "user": user_data,
+             "exp": datetime.now(tz=timezone.utc) + timedelta(seconds=self.secur_config.jwt_expires_in),
+             "iat": datetime.now(tz=timezone.utc),
+         }
+         return jwt.encode(payload, self.secur_config.jwt_secret, algorithm=self.secur_config.jwt_algorithm)
+
+     def _verify_jwt_token(self, token: str) -> Dict[str, Any]:
+         """Verify JWT token and return payload"""
+         try:
+             payload = jwt.decode(token, self.secur_config.jwt_secret, algorithms=[self.secur_config.jwt_algorithm])
+             return payload
+         except jwt.ExpiredSignatureError:
+             raise UnauthorizedException(details={"message": "Token has expired"})
+         except jwt.InvalidTokenError:
+             raise UnauthorizedException(details={"message": "Invalid token"})
+
+     def _generate_csrf_token(self, session_id: str) -> str:
+         """Generate a new CSRF token"""
+         timestamp = str(int(time.time()))
+         token_data = f"{session_id}:{timestamp}"
+         signature = hmac.new(self._secret_key, token_data.encode(), hashlib.sha256).digest()
+         return b64encode(f"{token_data}:{b64encode(signature).decode()}".encode()).decode()
+
+     def _validate_csrf_token(self, token: str) -> bool:
+         """Validate CSRF token"""
+         try:
+             decoded_token = b64decode(token.encode()).decode()
+             session_id, timestamp, signature = decoded_token.rsplit(":", 2)
+
+             # Verify timestamp
+             token_time = int(timestamp)
+             current_time = int(time.time())
+             if current_time - token_time > self._token_lifetime:
+                 return False
+
+             # Verify signature
+             expected_data = f"{session_id}:{timestamp}"
+             expected_signature = hmac.new(self._secret_key, expected_data.encode(), hashlib.sha256).digest()
+
+             actual_signature = b64decode(signature)
+             return hmac.compare_digest(expected_signature, actual_signature)
+
+         except (ValueError, AttributeError, TypeError):
+             return False
+
+     def _apply_cors_headers(self, response: Response) -> None:
+         """Apply CORS headers to response"""
+         if not self.secur_config.cors_configuration:
+             return
+
+         cors = self.secur_config.cors_configuration
+         response.headers.update(
+             {
+                 "Access-Control-Allow-Origin": ", ".join(cors.allowed_origins),
+                 "Access-Control-Allow-Methods": ", ".join(cors.allowed_methods),
+                 "Access-Control-Max-Age": str(cors.max_age),
+                 "Access-Control-Allow-Headers": "Content-Type, Authorization, X-CSRF-Token",
+                 "Access-Control-Allow-Credentials": "true",
+             }
+         )
+
+     def _apply_security_headers(self, response: Response) -> None:
+         """Apply security headers to response"""
+         if self.secur_config.security_headers:
+             response.headers.update(self.secur_config.security_headers)
+
+     async def before_request(self, request: Request) -> Request | Response:
+         """Process request before handling"""
+         # Rate limiting check
+         if rate_limit_response := self._rate_limit_check(request):
+             return rate_limit_response
+
+         # JWT authentication check
+         if self.secur_config.jwt_auth:
+             auth_header = request.headers.get("Authorization")
+             if not auth_header or not auth_header.startswith("Bearer "):
+                 raise UnauthorizedException(details={"message": "Authorization header missing or invalid"})
+             token = auth_header.split(" ")[1]
+             try:
+                 request.user = self._verify_jwt_token(token)
+             except UnauthorizedException as e:
+                 return Response(status_code=401, description=str(e))
+
+         # CSRF protection check
+         if self.secur_config.csrf_protection and request.method in ["POST", "PUT", "DELETE", "PATCH"]:
+             csrf_token = request.headers.get("X-CSRF-Token")
+             if not csrf_token or not self._validate_csrf_token(csrf_token):
+                 raise ForbiddenException(details={"message": "Invalid CSRF token"})
+
+         return request
+
+     async def after_request(self, response: Response) -> Response:
+         """Process response after handling"""
+         self._apply_security_headers(response)
+         self._apply_cors_headers(response)
+         return response
+
+     def generate_csrf_token(self, request: Request) -> str:
+         """Generate and set CSRF token for the request"""
+         if not hasattr(request, "session_id"):
+             request.session_id = secrets.token_urlsafe(32)
+         token = self._generate_csrf_token(request.session_id)
+         return token
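The CSRF token above is base64("session_id:timestamp:base64(hmac)"). A standalone round trip of the same generate-and-validate scheme, stdlib only, assuming the signing key stays in memory for the token's lifetime (the session id is a made-up value):

import hashlib
import hmac
import secrets
import time
from base64 import b64decode, b64encode

secret_key = secrets.token_bytes(32)

def make_token(session_id: str) -> str:
    data = f"{session_id}:{int(time.time())}"
    sig = hmac.new(secret_key, data.encode(), hashlib.sha256).digest()
    return b64encode(f"{data}:{b64encode(sig).decode()}".encode()).decode()

def check_token(token: str, lifetime: int = 3600) -> bool:
    try:
        session_id, timestamp, sig = b64decode(token.encode()).decode().rsplit(":", 2)
        if int(time.time()) - int(timestamp) > lifetime:
            return False
        expected = hmac.new(secret_key, f"{session_id}:{timestamp}".encode(), hashlib.sha256).digest()
        return hmac.compare_digest(expected, b64decode(sig))
    except (ValueError, TypeError):
        return False

token = make_token("session-abc")
print(check_token(token))        # True
print(check_token(token + "x"))  # False: the tampered token no longer decodes/verifies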
hypern/openapi/__init__.py ADDED
@@ -0,0 +1,5 @@
+ # -*- coding: utf-8 -*-
+ from .schemas import SchemaGenerator
+ from .swagger import SwaggerUI
+
+ __all__ = ["SchemaGenerator", "SwaggerUI"]
hypern/openapi/schemas.py ADDED
@@ -0,0 +1,51 @@
+ # -*- coding: utf-8 -*-
+ from __future__ import annotations
+
+ from hypern.hypern import BaseSchemaGenerator, Route as InternalRoute
+ import typing
+ import orjson
+
+
+ class EndpointInfo(typing.NamedTuple):
+     path: str
+     http_method: str
+     func: typing.Callable[..., typing.Any]
+
+
+ class SchemaGenerator(BaseSchemaGenerator):
+     def __init__(self, base_schema: dict[str, typing.Any]) -> None:
+         self.base_schema = base_schema
+
+     def get_endpoints(self, routes: list[InternalRoute]) -> list[EndpointInfo]:
+         """
+         Given the routes, yields the following information:
+
+         - path
+             eg: /users/
+         - http_method
+             one of 'get', 'post', 'put', 'patch', 'delete', 'options'
+         - func
+             method ready to extract the docstring
+         """
+         endpoints_info: list[EndpointInfo] = []
+
+         for route in routes:
+             method = route.method.lower()
+             endpoints_info.append(EndpointInfo(path=route.path, http_method=method, func=route.function.handler))
+         return endpoints_info
+
+     def get_schema(self, app) -> dict[str, typing.Any]:
+         schema = dict(self.base_schema)
+         schema.setdefault("paths", {})
+         for route in app.router.routes:
+             parsed = self.parse_docstring(route.doc)
+
+             if not parsed:
+                 continue
+
+             if route.path not in schema["paths"]:
+                 schema["paths"][route.path] = {}
+
+             schema["paths"][route.path][route.method.lower()] = orjson.loads(parsed)
+
+         return schema
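get_schema relies on BaseSchemaGenerator.parse_docstring (implemented in the Rust extension) returning a JSON string for a documented route. Assuming that contract, the merge step amounts to the following sketch; the base_schema, path, and parsed operation are made-up values:

import orjson

base_schema = {"openapi": "3.0.0", "info": {"title": "hypern", "version": "0.3.11"}}
schema = dict(base_schema)
schema.setdefault("paths", {})

# A parsed docstring as get_schema expects it: a JSON object describing one operation.
parsed = '{"summary": "List users", "responses": {"200": {"description": "OK"}}}'

schema["paths"].setdefault("/users", {})["get"] = orjson.loads(parsed)
print(orjson.dumps(schema).decode())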
hypern/openapi/swagger.py ADDED
@@ -0,0 +1,3 @@
+ from hypern.hypern import SwaggerUI
+
+ __all__ = ["SwaggerUI"]
hypern/processpool.py ADDED
@@ -0,0 +1,139 @@
+ import asyncio
+ import os
+ import signal
+ import sys
+ from typing import List
+ from concurrent.futures import ThreadPoolExecutor
+ from multiprocess import Process
+ from watchdog.observers import Observer
+
+ from .hypern import Server, SocketHeld
+ from .logging import logger
+ from .reload import EventHandler
+
+
+ def run_processes(
+     server: Server,
+     host: str,
+     port: int,
+     workers: int,
+     processes: int,
+     max_blocking_threads: int,
+     reload: bool = True,
+ ) -> List[Process]:
+     socket = SocketHeld(host, port)
+
+     process_pool = init_processpool(
+         server,
+         socket,
+         workers,
+         processes,
+         max_blocking_threads,
+     )
+
+     def terminating_signal_handler(_sig, _frame):
+         logger.info("Terminating server!!")
+         for process in process_pool:
+             process.kill()
+
+     signal.signal(signal.SIGINT, terminating_signal_handler)
+     signal.signal(signal.SIGTERM, terminating_signal_handler)
+
+     if reload:
+         # Set up file system watcher for auto-reload
+         watch_dirs = [os.getcwd()]
+         observer = Observer()
+         reload_handler = EventHandler(file_path=sys.argv[0], directory_path=os.getcwd())
+
+         for directory in watch_dirs:
+             observer.schedule(reload_handler, directory, recursive=True)
+
+         observer.start()
+
+     logger.info(f"Server started at http://{host}:{port}")
+     logger.info("Press Ctrl + C to stop")
+
+     try:
+         for process in process_pool:
+             logger.debug(f"Process {process.pid} started")
+             process.join()
+     except KeyboardInterrupt:
+         pass
+     finally:
+         if reload:
+             observer.stop()
+             observer.join()
+
+     return process_pool
+
+
+ def init_processpool(
+     server: Server,
+     socket: SocketHeld,
+     workers: int,
+     processes: int,
+     max_blocking_threads: int,
+ ) -> List[Process]:
+     process_pool = []
+
+     for i in range(processes):
+         copied_socket = socket.try_clone()
+         process = Process(
+             target=spawn_process,
+             args=(
+                 server,
+                 copied_socket,
+                 workers,
+                 max_blocking_threads,
+             ),
+             name=f"hypern-worker-{i}",
+         )
+         process.daemon = True  # This is important to avoid zombie processes
+         process.start()
+         process_pool.append(process)
+
+     return process_pool
+
+
+ class OptimizedEventLoopPolicy(asyncio.DefaultEventLoopPolicy):
+     def __init__(self, max_blocking_threads: int):
+         super().__init__()
+         self.max_blocking_threads = max_blocking_threads
+
+     def new_event_loop(self):
+         loop = super().new_event_loop()
+         # Optimize the thread pool for I/O operations
+         loop.set_default_executor(ThreadPoolExecutor(max_workers=self.max_blocking_threads, thread_name_prefix="hypern-io"))
+         return loop
+
+
+ def initialize_event_loop(max_blocking_threads: int = 100) -> asyncio.AbstractEventLoop:
+     if sys.platform.startswith("win32") or sys.platform.startswith("linux-cross"):
+         loop = asyncio.new_event_loop()
+         asyncio.set_event_loop(loop)
+     else:
+         import uvloop
+
+         uvloop.install()
+
+         asyncio.set_event_loop_policy(OptimizedEventLoopPolicy(max_blocking_threads))
+         loop = uvloop.new_event_loop()
+         asyncio.set_event_loop(loop)
+
+     loop.slow_callback_duration = 0.1  # Log warnings for slow callbacks
+     loop.set_debug(False)  # Disable debug mode
+     return loop
+
+
+ def spawn_process(
+     server: Server,
+     socket: SocketHeld,
+     workers: int,
+     max_blocking_threads: int,
+ ):
+     loop = initialize_event_loop(max_blocking_threads)
+
+     try:
+         server.start(socket, workers, max_blocking_threads)
+     except KeyboardInterrupt:
+         loop.close()
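OptimizedEventLoopPolicy only swaps the loop's default executor for a bounded, named thread pool. The effect can be reproduced with the stdlib alone; a minimal sketch, with the worker count and thread-name prefix chosen here for illustration:

import asyncio
import threading
from concurrent.futures import ThreadPoolExecutor

async def main():
    loop = asyncio.get_running_loop()
    # Same idea as OptimizedEventLoopPolicy.new_event_loop: blocking work submitted via
    # run_in_executor(None, ...) lands on a bounded pool of named threads.
    loop.set_default_executor(ThreadPoolExecutor(max_workers=8, thread_name_prefix="hypern-io"))
    name = await loop.run_in_executor(None, lambda: threading.current_thread().name)
    print(name)  # e.g. hypern-io_0

asyncio.run(main())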
hypern/py.typed ADDED
File without changes