hypern 0.2.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl → 0.3.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hypern/application.py +27 -3
- hypern/caching/strategies.py +115 -0
- hypern/hypern.cpython-310-i386-linux-gnu.so +0 -0
- hypern/hypern.pyi +42 -5
- hypern/middleware/limit.py +1 -1
- hypern/middleware/security.py +179 -0
- hypern/processpool.py +9 -3
- hypern/reload.py +26 -40
- hypern/routing/route.py +1 -1
- hypern/ws/__init__.py +4 -0
- hypern/ws/channel.py +80 -0
- hypern/ws/heartbeat.py +74 -0
- hypern/ws/room.py +76 -0
- hypern/ws/route.py +26 -0
- hypern/ws.py +16 -0
- {hypern-0.2.1.dist-info → hypern-0.3.1.dist-info}/METADATA +2 -2
- {hypern-0.2.1.dist-info → hypern-0.3.1.dist-info}/RECORD +19 -11
- {hypern-0.2.1.dist-info → hypern-0.3.1.dist-info}/WHEEL +1 -1
- {hypern-0.2.1.dist-info → hypern-0.3.1.dist-info}/licenses/LICENSE +0 -0
hypern/application.py
CHANGED
@@ -8,8 +8,7 @@ import orjson
 from typing_extensions import Annotated, Doc
 
 from hypern.datastructures import Contact, HTTPMethod, Info, License
-from hypern.hypern import FunctionInfo, Router
-from hypern.hypern import Route as InternalRoute
+from hypern.hypern import FunctionInfo, Router, Route as InternalRoute, WebsocketRouter
 from hypern.openapi import SchemaGenerator, SwaggerUI
 from hypern.processpool import run_processes
 from hypern.response import HTMLResponse, JSONResponse
@@ -17,6 +16,7 @@ from hypern.routing import Route
 from hypern.scheduler import Scheduler
 from hypern.middleware import Middleware
 from hypern.args_parser import ArgsConfig
+from hypern.ws import WebsocketRoute
 
 AppType = TypeVar("AppType", bound="Hypern")
 
@@ -47,6 +47,15 @@ class Hypern:
                 """
             ),
         ] = None,
+        websockets: Annotated[
+            List[WebsocketRoute] | None,
+            Doc(
+                """
+                A list of routes to serve incoming WebSocket requests.
+                You can define routes using the `WebsocketRoute` class from `Hypern
+                """
+            ),
+        ] = None,
         title: Annotated[
             str,
             Doc(
@@ -186,6 +195,7 @@ class Hypern:
     ) -> None:
         super().__init__(*args, **kwargs)
         self.router = Router(path="/")
+        self.websocket_router = WebsocketRouter(path="/")
         self.scheduler = scheduler
         self.injectables = default_injectables or {}
         self.middleware_before_request = []
@@ -193,9 +203,12 @@ class Hypern:
         self.response_headers = {}
         self.args = ArgsConfig()
 
-        for route in routes:
+        for route in routes or []:
             self.router.extend_route(route(app=self).routes)
 
+        for websocket_route in websockets or []:
+            self.websocket_router.add_route(websocket_route)
+
         if openapi_url and docs_url:
             self.__add_openapi(
                 info=Info(
@@ -358,6 +371,7 @@ class Hypern:
             processes=self.args.processes,
             max_blocking_threads=self.args.max_blocking_threads,
             router=self.router,
+            websocket_router=self.websocket_router,
             injectables=self.injectables,
             before_request=self.middleware_before_request,
             after_request=self.middleware_after_request,
@@ -379,3 +393,13 @@ class Hypern:
         func_info = FunctionInfo(handler=handler, is_async=is_async)
         route = InternalRoute(path=endpoint, function=func_info, method=method.name)
         self.router.add_route(route=route)
+
+    def add_websocket(self, ws_route: WebsocketRoute):
+        """
+        Adds a WebSocket route to the WebSocket router.
+
+        Args:
+            ws_route (WebsocketRoute): The WebSocket route to be added to the router.
+        """
+        for route in ws_route.routes:
+            self.websocket_router.add_route(route=route)

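
The new `websockets` parameter and `add_websocket` method are the public entry points for the WebSocket support added in this release. The following is a minimal wiring sketch, not taken from the diff: it assumes `Hypern` is importable from the top-level `hypern` package and that a handler receives the `WebSocketSession` declared in the updated stubs.

from hypern import Hypern                      # assumed top-level export
from hypern.hypern import WebSocketSession
from hypern.ws import WebsocketRoute

ws = WebsocketRoute()

@ws.on("/echo")
def echo(session: WebSocketSession):
    # the stub only guarantees the session argument; how incoming messages
    # are delivered (return value vs. session.receiver) is not shown here
    session.send("hello")

app = Hypern(routes=[], websockets=[ws])
# equivalently, after construction:
# app.add_websocket(ws)
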
hypern/caching/strategies.py
ADDED

@@ -0,0 +1,115 @@
+from typing import Any, Optional, Callable, TypeVar
+from datetime import datetime
+import asyncio
+import orjson
+
+from hypern.logging import logger
+
+T = TypeVar("T")
+
+
+class CacheEntry:
+    def __init__(self, value: Any, expires_at: int, stale_at: Optional[int] = None):
+        self.value = value
+        self.expires_at = expires_at
+        self.stale_at = stale_at or expires_at
+        self.is_revalidating = False
+
+    def to_json(self) -> str:
+        return orjson.dumps({"value": self.value, "expires_at": self.expires_at, "stale_at": self.stale_at, "is_revalidating": self.is_revalidating})
+
+    @classmethod
+    def from_json(cls, data: str) -> "CacheEntry":
+        data_dict = orjson.loads(data)
+        entry = cls(value=data_dict["value"], expires_at=data_dict["expires_at"], stale_at=data_dict["stale_at"])
+        entry.is_revalidating = data_dict["is_revalidating"]
+        return entry
+
+
+class CacheStrategy:
+    def __init__(self, backend: Any):
+        self.backend = backend
+
+    async def get(self, key: str, loader: Callable[[], T]) -> T:
+        raise NotImplementedError
+
+
+class StaleWhileRevalidateStrategy(CacheStrategy):
+    def __init__(self, backend: Any, stale_ttl: int, cache_ttl: int):
+        super().__init__(backend)
+        self.stale_ttl = stale_ttl
+        self.cache_ttl = cache_ttl
+
+    async def get(self, key: str, loader: Callable[[], T]) -> T:
+        now = int(datetime.now().timestamp())
+
+        # Try to get from cache
+        cached_data = await self.backend.get(key)
+        if cached_data:
+            entry = CacheEntry.from_json(cached_data)
+
+            if now < entry.stale_at:
+                # Cache is fresh
+                return entry.value
+
+            if now < entry.expires_at and not entry.is_revalidating:
+                # Cache is stale but usable - trigger background revalidation
+                entry.is_revalidating = True
+                await self.backend.set(key, entry.to_json(), self.cache_ttl)
+                asyncio.create_task(self._revalidate(key, loader))
+                return entry.value
+
+        # Cache miss or expired - load fresh data
+        value = await loader()
+        entry = CacheEntry(value=value, expires_at=now + self.cache_ttl, stale_at=now + (self.cache_ttl - self.stale_ttl))
+        await self.backend.set(key, entry.to_json(), self.cache_ttl)
+        return value
+
+    async def _revalidate(self, key: str, loader: Callable[[], T]):
+        try:
+            value = await loader()
+            now = int(datetime.now().timestamp())
+            entry = CacheEntry(value=value, expires_at=now + self.cache_ttl, stale_at=now + (self.cache_ttl - self.stale_ttl))
+            await self.backend.set(key, entry.to_json(), self.cache_ttl)
+        except Exception as e:
+            logger.error(f"Revalidation failed for key {key}: {e}")
+
+
+class CacheAsideStrategy(CacheStrategy):
+    def __init__(self, backend: Any, ttl: int):
+        super().__init__(backend)
+        self.ttl = ttl
+
+    async def get(self, key: str, loader: Callable[[], T]) -> T:
+        # Try to get from cache
+        cached_data = await self.backend.get(key)
+        if cached_data:
+            entry = CacheEntry.from_json(cached_data)
+            if entry.expires_at > int(datetime.now().timestamp()):
+                return entry.value
+
+        # Cache miss or expired - load from source
+        value = await loader()
+        entry = CacheEntry(value=value, expires_at=int(datetime.now().timestamp()) + self.ttl)
+        await self.backend.set(key, entry.to_json(), self.ttl)
+        return value
+
+
+def cache_with_strategy(strategy: CacheStrategy, key_prefix: str = None):
+    """
+    Decorator for using cache strategies
+    """
+
+    def decorator(func):
+        async def wrapper(*args, **kwargs):
+            # Generate cache key
+            cache_key = f"{key_prefix or func.__name__}:{hash(str(args) + str(kwargs))}"
+
+            async def loader():
+                return await func(*args, **kwargs)
+
+            return await strategy.get(cache_key, loader)
+
+        return wrapper
+
+    return decorator

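
For context, here is a hypothetical way the new strategies could be used. The in-memory backend below is an assumption standing in for hypern's Redis backend; the strategies only rely on its async `get(key)` / `set(key, value, ttl)` shape.

import asyncio
from hypern.caching.strategies import CacheAsideStrategy, cache_with_strategy

class DictBackend:
    def __init__(self):
        self._data = {}

    async def get(self, key):
        return self._data.get(key)

    async def set(self, key, value, ttl):
        # ttl is ignored in this toy backend; expiry is enforced by CacheEntry itself
        self._data[key] = value

strategy = CacheAsideStrategy(backend=DictBackend(), ttl=60)

@cache_with_strategy(strategy, key_prefix="user")
async def load_user(user_id: int):
    return {"id": user_id, "name": "example"}

async def main():
    print(await load_user(1))   # loads and stores
    print(await load_user(1))   # served from cache until the 60 s TTL lapses

asyncio.run(main())
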
hypern/hypern.cpython-310-i386-linux-gnu.so
CHANGED

Binary file

hypern/hypern.pyi
CHANGED
@@ -16,6 +16,9 @@ class RedisBackend(BaseBackend):
     get: Callable[[str], Any]
     set: Callable[[Any, str, int], None]
     delete_startswith: Callable[[str], None]
+    set_nx: Callable[[Any, str, int], None]
+    get_ttl: Callable[[str], int]
+    current_timestamp: Callable[[], int]
 
 @dataclass
 class BaseSchemaGenerator:
@@ -183,9 +186,6 @@ class FunctionInfo:
     handler: Callable
     is_async: bool
 
-class SocketHeld:
-    socket: Any
-
 @dataclass
 class Server:
     router: Router
@@ -195,6 +195,7 @@ class Server:
 
     def add_route(self, route: Route) -> None: ...
     def set_router(self, router: Router) -> None: ...
+    def set_websocket_router(self, websocket_router: WebsocketRouter) -> None: ...
     def start(self, socket: SocketHeld, worker: int, max_blocking_threads: int) -> None: ...
     def inject(self, key: str, value: Any) -> None: ...
     def set_injected(self, injected: Dict[str, Any]) -> None: ...
@@ -227,15 +228,48 @@ class Router:
     def get_routes_by_method(self, method: str) -> List[Route]: ...
     def extend_route(self, routes: List[Route]) -> None: ...
 
+@dataclass
+class SocketHeld:
+    socket: Any
+
+@dataclass
+class WebSocketSession:
+    sender: Callable[[str], None]
+    receiver: Callable[[], str]
+    is_closed: bool
+
+    def send(self, message: str) -> None: ...
+
+@dataclass
+class WebsocketRoute:
+    path: str
+    handler: Callable[[WebSocketSession], None]
+
+@dataclass
+class WebsocketRouter:
+    path: str
+    routes: List[WebsocketRoute]
+
+    def add_route(self, route: WebsocketRoute) -> None: ...
+    def remove_route(self, path: str) -> None: ...
+    def extend_route(self, route: WebsocketRoute) -> None: ...
+    def clear_routes(self) -> None: ...
+    def route_count(self) -> int: ...
+
 @dataclass
 class Header:
     headers: Dict[str, str]
 
+    def get(self, key: str) -> str | None: ...
+    def set(self, key: str, value: str) -> None: ...
+    def append(self, key: str, value: str) -> None: ...
+    def update(self, headers: Dict[str, str]) -> None: ...
+
 @dataclass
 class Response:
     status_code: int
     response_type: str
-    headers:
+    headers: Header
     description: str
     file_path: str
 
@@ -260,7 +294,10 @@ class BodyData:
 @dataclass
 class Request:
     query_params: QueryParams
-    headers:
+    headers: Header
     path_params: Dict[str, str]
     body: BodyData
     method: str
+    remote_addr: str
+    timestamp: float
+    context_id: str

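
The stub changes above give `Header` a small mutation API and expose `remote_addr`, `timestamp`, and `context_id` on `Request`. Here is a short sketch of how a before-request hook might use them; the hook registration mechanism itself is not part of this diff and is assumed.

from hypern.hypern import Request

def tag_request(request: Request) -> Request:
    # prefer a forwarded address when present, otherwise the new remote_addr field
    client = request.headers.get("X-Forwarded-For") or request.remote_addr
    request.headers.set("X-Request-Context", request.context_id)
    request.headers.update({"X-Client": client})
    return request
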
hypern/middleware/limit.py
CHANGED

hypern/middleware/security.py
ADDED

@@ -0,0 +1,179 @@
+import hashlib
+import hmac
+import secrets
+import time
+from base64 import b64decode, b64encode
+from dataclasses import dataclass
+from datetime import datetime, timedelta
+from typing import Any, Dict, List, Optional
+
+import jwt
+
+from hypern.exceptions import Forbidden, Unauthorized
+from hypern.hypern import Middleware, Request, Response
+
+
+@dataclass
+class CORSConfig:
+    allowed_origins: List[str]
+    allowed_methods: List[str]
+    max_age: int
+
+
+@dataclass
+class SecurityConfig:
+    rate_limiting: bool = False
+    jwt_auth: bool = False
+    cors_configuration: Optional[CORSConfig] = None
+    csrf_protection: bool = False
+    security_headers: Optional[Dict[str, str]] = None
+    jwt_secret: str = ""
+    jwt_algorithm: str = "HS256"
+    jwt_expires_in: int = 3600  # 1 hour in seconds
+
+    def __post_init__(self):
+        if self.cors_configuration:
+            self.cors_configuration = CORSConfig(**self.cors_configuration)
+
+        if self.security_headers is None:
+            self.security_headers = {
+                "X-Frame-Options": "DENY",
+                "X-Content-Type-Options": "nosniff",
+                "Strict-Transport-Security": "max-age=31536000; includeSubDomains",
+            }
+
+
+class SecurityMiddleware(Middleware):
+    def __init__(self, config: SecurityConfig):
+        super().__init__()
+        self.config = config
+        self._secret_key = secrets.token_bytes(32)
+        self._token_lifetime = 3600
+        self._rate_limit_storage = {}
+
+    def _rate_limit_check(self, request: Request) -> Optional[Response]:
+        """Check if the request exceeds rate limits"""
+        if not self.config.rate_limiting:
+            return None
+
+        client_ip = request.client.host
+        current_time = time.time()
+        window_start = int(current_time - 60)  # 1-minute window
+
+        # Clean up old entries
+        self._rate_limit_storage = {ip: hits for ip, hits in self._rate_limit_storage.items() if hits["timestamp"] > window_start}
+
+        if client_ip not in self._rate_limit_storage:
+            self._rate_limit_storage[client_ip] = {"count": 1, "timestamp": current_time}
+        else:
+            self._rate_limit_storage[client_ip]["count"] += 1
+
+        if self._rate_limit_storage[client_ip]["count"] > 60:  # 60 requests per minute
+            return Response(status_code=429, description=b"Too Many Requests", headers={"Retry-After": "60"})
+        return None
+
+    def _generate_jwt_token(self, user_data: Dict[str, Any]) -> str:
+        """Generate a JWT token"""
+        if not self.config.jwt_secret:
+            raise ValueError("JWT secret key is not configured")
+
+        payload = {"user": user_data, "exp": datetime.utcnow() + timedelta(seconds=self.config.jwt_expires_in), "iat": datetime.utcnow()}
+        return jwt.encode(payload, self.config.jwt_secret, algorithm=self.config.jwt_algorithm)
+
+    def _verify_jwt_token(self, token: str) -> Dict[str, Any]:
+        """Verify JWT token and return payload"""
+        try:
+            payload = jwt.decode(token, self.config.jwt_secret, algorithms=[self.config.jwt_algorithm])
+            return payload
+        except jwt.ExpiredSignatureError:
+            raise Unauthorized("Token has expired")
+        except jwt.InvalidTokenError:
+            raise Unauthorized("Invalid token")
+
+    def _generate_csrf_token(self, session_id: str) -> str:
+        """Generate a new CSRF token"""
+        timestamp = str(int(time.time()))
+        token_data = f"{session_id}:{timestamp}"
+        signature = hmac.new(self._secret_key, token_data.encode(), hashlib.sha256).digest()
+        return b64encode(f"{token_data}:{b64encode(signature).decode()}".encode()).decode()
+
+    def _validate_csrf_token(self, token: str) -> bool:
+        """Validate CSRF token"""
+        try:
+            decoded_token = b64decode(token.encode()).decode()
+            session_id, timestamp, signature = decoded_token.rsplit(":", 2)
+
+            # Verify timestamp
+            token_time = int(timestamp)
+            current_time = int(time.time())
+            if current_time - token_time > self._token_lifetime:
+                return False
+
+            # Verify signature
+            expected_data = f"{session_id}:{timestamp}"
+            expected_signature = hmac.new(self._secret_key, expected_data.encode(), hashlib.sha256).digest()
+
+            actual_signature = b64decode(signature)
+            return hmac.compare_digest(expected_signature, actual_signature)
+
+        except (ValueError, AttributeError, TypeError):
+            return False
+
+    def _apply_cors_headers(self, response: Response) -> None:
+        """Apply CORS headers to response"""
+        if not self.config.cors_configuration:
+            return
+
+        cors = self.config.cors_configuration
+        response.headers.update(
+            {
+                "Access-Control-Allow-Origin": ", ".join(cors.allowed_origins),
+                "Access-Control-Allow-Methods": ", ".join(cors.allowed_methods),
+                "Access-Control-Max-Age": str(cors.max_age),
+                "Access-Control-Allow-Headers": "Content-Type, Authorization, X-CSRF-Token",
+                "Access-Control-Allow-Credentials": "true",
+            }
+        )
+
+    def _apply_security_headers(self, response: Response) -> None:
+        """Apply security headers to response"""
+        if self.config.security_headers:
+            response.headers.update(self.config.security_headers)
+
+    async def before_request(self, request: Request) -> Request | Response:
+        """Process request before handling"""
+        # Rate limiting check
+        if rate_limit_response := self._rate_limit_check(request):
+            return rate_limit_response
+
+        # JWT authentication check
+        if self.config.jwt_auth:
+            auth_header = request.headers.get("Authorization")
+            if not auth_header or not auth_header.startswith("Bearer "):
+                raise Unauthorized("Missing or invalid authorization header")
+            token = auth_header.split(" ")[1]
+            try:
+                request.user = self._verify_jwt_token(token)
+            except Unauthorized as e:
+                return Response(status_code=401, description=str(e))
+
+        # CSRF protection check
+        if self.config.csrf_protection and request.method in ["POST", "PUT", "DELETE", "PATCH"]:
+            csrf_token = request.headers.get("X-CSRF-Token")
+            if not csrf_token or not self._validate_csrf_token(csrf_token):
+                raise Forbidden("CSRF token missing or invalid")
+
+        return request
+
+    async def after_request(self, response: Response) -> Response:
+        """Process response after handling"""
+        self._apply_security_headers(response)
+        self._apply_cors_headers(response)
+        return response
+
+    def generate_csrf_token(self, request: Request) -> str:
+        """Generate and set CSRF token for the request"""
+        if not hasattr(request, "session_id"):
+            request.session_id = secrets.token_urlsafe(32)
+        token = self._generate_csrf_token(request.session_id)
+        return token

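
A hypothetical configuration sketch for the new `SecurityMiddleware`. How a middleware instance is attached to the application is an assumption and not shown in this diff; only the `SecurityConfig` fields come from the code above.

from hypern.middleware.security import SecurityConfig, SecurityMiddleware

config = SecurityConfig(
    rate_limiting=True,
    jwt_auth=True,
    jwt_secret="change-me",           # required when jwt_auth is enabled
    csrf_protection=True,
    cors_configuration={
        "allowed_origins": ["https://example.com"],
        "allowed_methods": ["GET", "POST"],
        "max_age": 600,
    },
)
security = SecurityMiddleware(config)
# e.g. app.add_middleware(security)  # exact attachment API is an assumption
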
hypern/processpool.py
CHANGED
@@ -7,7 +7,7 @@ from typing import Any, Dict, List
 from multiprocess import Process
 from watchdog.observers import Observer
 
-from .hypern import FunctionInfo, Router, Server, SocketHeld
+from .hypern import FunctionInfo, Router, Server, SocketHeld, WebsocketRouter
 from .logging import logger
 from .reload import EventHandler
 
@@ -19,6 +19,7 @@ def run_processes(
     processes: int,
     max_blocking_threads: int,
     router: Router,
+    websocket_router: WebsocketRouter,
     injectables: Dict[str, Any],
     before_request: List[FunctionInfo],
     after_request: List[FunctionInfo],
@@ -27,7 +28,9 @@ def run_processes(
 ) -> List[Process]:
     socket = SocketHeld(host, port)
 
-    process_pool = init_processpool(
+    process_pool = init_processpool(
+        router, websocket_router, socket, workers, processes, max_blocking_threads, injectables, before_request, after_request, response_headers
+    )
 
     def terminating_signal_handler(_sig, _frame):
         logger.info("Terminating server!!")
@@ -67,6 +70,7 @@ def run_processes(
 
 def init_processpool(
     router: Router,
+    websocket_router: WebsocketRouter,
     socket: SocketHeld,
     workers: int,
     processes: int,
@@ -82,7 +86,7 @@ def init_processpool(
         copied_socket = socket.try_clone()
         process = Process(
             target=spawn_process,
-            args=(router, copied_socket, workers, max_blocking_threads, injectables, before_request, after_request, response_headers),
+            args=(router, websocket_router, copied_socket, workers, max_blocking_threads, injectables, before_request, after_request, response_headers),
         )
         process.start()
         process_pool.append(process)
@@ -106,6 +110,7 @@ def initialize_event_loop():
 
 def spawn_process(
     router: Router,
+    websocket_router: WebsocketRouter,
     socket: SocketHeld,
     workers: int,
     max_blocking_threads: int,
@@ -118,6 +123,7 @@ def spawn_process(
 
     server = Server()
     server.set_router(router=router)
+    server.set_websocket_router(websocket_router=websocket_router)
     server.set_injected(injected=injectables)
     server.set_before_hooks(hooks=before_request)
     server.set_after_hooks(hooks=after_request)

hypern/reload.py
CHANGED
@@ -2,6 +2,8 @@ import sys
 import time
 import subprocess
 from watchdog.events import FileSystemEventHandler
+import signal
+import os
 
 from .logging import logger
 
@@ -10,51 +12,35 @@ class EventHandler(FileSystemEventHandler):
     def __init__(self, file_path: str, directory_path: str) -> None:
         self.file_path = file_path
         self.directory_path = directory_path
-        self.process = None
-        self.last_reload = time.time()
-
-    def stop_server(self):
-        if self.process:
-            try:
-                # Check if the process is still alive
-                if self.process.poll() is None:  # None means the process is still running
-                    self.process.terminate()  # Gracefully terminate the process
-                    self.process.wait(timeout=5)  # Wait for the process to exit
-                else:
-                    logger.error("Process is not running.")
-            except subprocess.TimeoutExpired:
-                logger.error("Process did not terminate in time. Forcing termination.")
-                self.process.kill()  # Forcefully kill the process if it doesn't stop
-            except ProcessLookupError:
-                logger.error("Process does not exist.")
-            except Exception as e:
-                logger.error(f"An error occurred while stopping the server: {e}")
-        else:
-            logger.debug("No process to stop.")
+        self.process = None
+        self.last_reload = time.time()
 
     def reload(self):
-
-
-
-
-
-
-
-
-
-
-
+        # Kill all existing processes with the same command
+        current_cmd = [sys.executable, *sys.argv]
+
+        try:
+            # Find and kill existing processes
+            for proc in subprocess.Popen(["ps", "aux"], stdout=subprocess.PIPE).communicate()[0].decode().splitlines():
+                if all(str(arg) in proc for arg in current_cmd):
+                    pid = int(proc.split()[1])
+                    try:
+                        os.kill(pid, signal.SIGKILL)  # NOSONAR
+                        logger.debug(f"Killed process with PID {pid}")
+                    except ProcessLookupError:
+                        pass
+
+            # Start new process
+            self.process = subprocess.Popen(current_cmd)
+            self.last_reload = time.time()
+            logger.debug("Server reloaded successfully")
+
+        except Exception as e:
+            logger.error(f"Reload failed: {e}")
 
     def on_modified(self, event) -> None:
-        """
-        This function is a callback that will start a new server on every even change
-
-        :param event FSEvent: a data structure with info about the events
-        """
-
-        # Avoid reloading multiple times when watchdog detects multiple events
         if time.time() - self.last_reload < 0.5:
             return
 
-        time.sleep(0.2)  #
+        time.sleep(0.2)  # Ensure file is written
         self.reload()

(Note: the eleven removed lines in the middle of the old `reload()` body were not captured by the diff rendering; they are shown here as empty removals.)
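
The rewritten `reload()` shells out to `ps aux`, SIGKILLs every process whose command line contains all of the current arguments, and then respawns the command; this is Unix-only and matches by substring. For comparison only, a sketch of the same check using psutil (an assumption; hypern itself does not depend on psutil):

import os
import signal
import sys
import psutil

current_cmd = [sys.executable, *sys.argv]

for proc in psutil.process_iter(["pid", "cmdline"]):
    cmdline = " ".join(proc.info["cmdline"] or [])
    # skip ourselves, kill any other process started with the same command line
    if proc.pid != os.getpid() and all(str(arg) in cmdline for arg in current_cmd):
        os.kill(proc.pid, signal.SIGKILL)
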
hypern/routing/route.py
CHANGED
@@ -188,7 +188,7 @@ class Route:
         docs["responses"] = {
             "200": {
                 "description": "Successful response",
-                "content": {"application/json": {"schema": response_type.
+                "content": {"application/json": {"schema": pydantic_to_swagger(response_type).get(response_type.__name__)}},
             }
         }
 

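
The fixed line feeds the route's response model through hypern's `pydantic_to_swagger` helper and picks the schema keyed by the model's class name. As a rough illustration of the resulting structure, assuming pydantic v2 and using its own schema export as a stand-in for the helper:

from pydantic import BaseModel

class UserOut(BaseModel):
    id: int
    name: str

# approximation: hypern's pydantic_to_swagger may differ in detail
schema = {UserOut.__name__: UserOut.model_json_schema()}
docs_fragment = {
    "200": {
        "description": "Successful response",
        "content": {"application/json": {"schema": schema.get(UserOut.__name__)}},
    }
}
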
hypern/ws/__init__.py
ADDED
hypern/ws/channel.py
ADDED
@@ -0,0 +1,80 @@
+from dataclasses import dataclass, field
+from typing import Any, Awaitable, Callable, Dict, Set
+
+from hypern.hypern import WebSocketSession
+
+
+@dataclass
+class Channel:
+    name: str
+    subscribers: Set[WebSocketSession] = field(default_factory=set)
+    handlers: Dict[str, Callable[[WebSocketSession, Any], Awaitable[None]]] = field(default_factory=dict)
+
+    def publish(self, event: str, data: Any, publisher: WebSocketSession = None):
+        """Publish an event to all subscribers except the publisher"""
+        for subscriber in self.subscribers:
+            if subscriber != publisher:
+                subscriber.send({"channel": self.name, "event": event, "data": data})
+
+    def handle_event(self, event: str, session: WebSocketSession, data: Any):
+        """Handle an event on this channel"""
+        if event in self.handlers:
+            self.handlers[event](session, data)
+
+    def add_subscriber(self, subscriber: WebSocketSession):
+        """Add a subscriber to the channel"""
+        self.subscribers.add(subscriber)
+
+    def remove_subscriber(self, subscriber: WebSocketSession):
+        """Remove a subscriber from the channel"""
+        self.subscribers.discard(subscriber)
+
+    def on(self, event: str):
+        """Decorator for registering event handlers"""
+
+        def decorator(handler: Callable[[WebSocketSession, Any], Awaitable[None]]):
+            self.handlers[event] = handler
+            return handler
+
+        return decorator
+
+
+class ChannelManager:
+    def __init__(self):
+        self.channels: Dict[str, Channel] = {}
+        self.client_channels: Dict[WebSocketSession, Set[str]] = {}
+
+    def create_channel(self, channel_name: str) -> Channel:
+        """Create a new channel if it doesn't exist"""
+        if channel_name not in self.channels:
+            self.channels[channel_name] = Channel(channel_name)
+        return self.channels[channel_name]
+
+    def get_channel(self, channel_name: str) -> Channel:
+        """Get a channel by name"""
+        return self.channels.get(channel_name)
+
+    def subscribe(self, client: WebSocketSession, channel_name: str):
+        """Subscribe a client to a channel"""
+        channel = self.create_channel(channel_name)
+        channel.add_subscriber(client)
+
+        if client not in self.client_channels:
+            self.client_channels[client] = set()
+        self.client_channels[client].add(channel_name)
+
+    def unsubscribe(self, client: WebSocketSession, channel_name: str):
+        """Unsubscribe a client from a channel"""
+        channel = self.get_channel(channel_name)
+        if channel:
+            channel.remove_subscriber(client)
+            if client in self.client_channels:
+                self.client_channels[client].discard(channel_name)
+
+    def unsubscribe_all(self, client: WebSocketSession):
+        """Unsubscribe a client from all channels"""
+        if client in self.client_channels:
+            channels = self.client_channels[client].copy()
+            for channel_name in channels:
+                self.unsubscribe(client, channel_name)
+            del self.client_channels[client]

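
A usage sketch for the channel pub/sub helpers. How these callbacks get invoked from a WebSocket route is assumed, and `WebSocketSession` objects are assumed hashable since the module stores them in sets and dict keys.

from hypern.hypern import WebSocketSession
from hypern.ws.channel import ChannelManager

manager = ChannelManager()
chat = manager.create_channel("chat")

@chat.on("message")
def on_message(session: WebSocketSession, data):
    # re-broadcast to everyone else subscribed to this channel
    chat.publish("message", data, publisher=session)

def on_connect(session: WebSocketSession):
    manager.subscribe(session, "chat")

def on_disconnect(session: WebSocketSession):
    manager.unsubscribe_all(session)
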
hypern/ws/heartbeat.py
ADDED
@@ -0,0 +1,74 @@
+import asyncio
+from dataclasses import dataclass
+from time import time
+from typing import Dict
+
+from hypern.hypern import WebSocketSession
+
+
+@dataclass
+class HeartbeatConfig:
+    ping_interval: float = 30.0  # Send ping every 30 seconds
+    ping_timeout: float = 10.0  # Wait 10 seconds for pong response
+    max_missed_pings: int = 2  # Disconnect after 2 missed pings
+
+
+class HeartbeatManager:
+    def __init__(self, config: HeartbeatConfig = None):
+        self.config = config or HeartbeatConfig()
+        self.active_sessions: Dict[WebSocketSession, float] = {}
+        self.ping_tasks: Dict[WebSocketSession, asyncio.Task] = {}
+        self.missed_pings: Dict[WebSocketSession, int] = {}
+
+    async def start_heartbeat(self, session: WebSocketSession):
+        """Start heartbeat monitoring for a session"""
+        self.active_sessions[session] = time()
+        self.missed_pings[session] = 0
+        self.ping_tasks[session] = asyncio.create_task(self._heartbeat_loop(session))
+
+    async def stop_heartbeat(self, session: WebSocketSession):
+        """Stop heartbeat monitoring for a session"""
+        if session in self.ping_tasks:
+            self.ping_tasks[session].cancel()
+            del self.ping_tasks[session]
+        self.active_sessions.pop(session, None)
+        self.missed_pings.pop(session, None)
+
+    async def handle_pong(self, session: WebSocketSession):
+        """Handle pong response from client"""
+        if session in self.active_sessions:
+            self.active_sessions[session] = time()
+            self.missed_pings[session] = 0
+
+    async def _heartbeat_loop(self, session: WebSocketSession):
+        """Main heartbeat loop for a session"""
+        try:
+            while True:
+                await asyncio.sleep(self.config.ping_interval)
+
+                if session not in self.active_sessions:
+                    break
+
+                # Send ping frame
+                try:
+                    await session.ping()
+                    last_pong = self.active_sessions[session]
+
+                    # Wait for pong timeout
+                    await asyncio.sleep(self.config.ping_timeout)
+
+                    # Check if we received a pong
+                    if self.active_sessions[session] == last_pong:
+                        self.missed_pings[session] += 1
+
+                        # Check if we exceeded max missed pings
+                        if self.missed_pings[session] >= self.config.max_missed_pings:
+                            await session.close(1001, "Connection timeout")
+                            break
+
+                except Exception as e:
+                    await session.close(1001, f"Heartbeat failed: {str(e)}")
+                    break
+
+        finally:
+            await self.stop_heartbeat(session)

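
A lifecycle sketch for the heartbeat manager. Note that `_heartbeat_loop` calls `session.ping()` and `session.close()`, which the bundled `.pyi` stub in this release does not declare, so their availability on `WebSocketSession` is assumed here.

from hypern.ws.heartbeat import HeartbeatConfig, HeartbeatManager

heartbeats = HeartbeatManager(HeartbeatConfig(ping_interval=15.0, ping_timeout=5.0))

async def on_connect(session):
    # schedules the per-session ping loop on the running event loop
    await heartbeats.start_heartbeat(session)

async def on_pong(session):
    await heartbeats.handle_pong(session)

async def on_disconnect(session):
    await heartbeats.stop_heartbeat(session)
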
hypern/ws/room.py
ADDED
@@ -0,0 +1,76 @@
+from dataclasses import dataclass, field
+from typing import Dict, Set
+
+from hypern.hypern import WebSocketSession
+
+
+@dataclass
+class Room:
+    name: str
+    clients: Set[WebSocketSession] = field(default_factory=set)
+
+    def broadcast(self, message: str, exclude: WebSocketSession = None):
+        """Broadcast message to all clients in the room except excluded one"""
+        for client in self.clients:
+            if client != exclude:
+                client.send(message)
+
+    def add_client(self, client: WebSocketSession):
+        """Add a client to the room"""
+        self.clients.add(client)
+
+    def remove_client(self, client: WebSocketSession):
+        """Remove a client from the room"""
+        self.clients.discard(client)
+
+    @property
+    def client_count(self) -> int:
+        return len(self.clients)
+
+
+class RoomManager:
+    def __init__(self):
+        self.rooms: Dict[str, Room] = {}
+        self.client_rooms: Dict[WebSocketSession, Set[str]] = {}
+
+    def create_room(self, room_name: str) -> Room:
+        """Create a new room if it doesn't exist"""
+        if room_name not in self.rooms:
+            self.rooms[room_name] = Room(room_name)
+        return self.rooms[room_name]
+
+    def get_room(self, room_name: str) -> Room:
+        """Get a room by name"""
+        return self.rooms.get(room_name)
+
+    def join_room(self, client: WebSocketSession, room_name: str):
+        """Add a client to a room"""
+        room = self.create_room(room_name)
+        room.add_client(client)
+
+        if client not in self.client_rooms:
+            self.client_rooms[client] = set()
+        self.client_rooms[client].add(room_name)
+
+        room.broadcast(f"Client joined room: {room_name}", exclude=client)
+
+    def leave_room(self, client: WebSocketSession, room_name: str):
+        """Remove a client from a room"""
+        room = self.get_room(room_name)
+        if room:
+            room.remove_client(client)
+            if client in self.client_rooms:
+                self.client_rooms[client].discard(room_name)
+
+            room.broadcast(f"Client left room: {room_name}", exclude=client)
+
+            if room.client_count == 0:
+                del self.rooms[room_name]
+
+    def leave_all_rooms(self, client: WebSocketSession):
+        """Remove a client from all rooms"""
+        if client in self.client_rooms:
+            rooms = self.client_rooms[client].copy()
+            for room_name in rooms:
+                self.leave_room(client, room_name)
+            del self.client_rooms[client]

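
A sketch of how the room manager might back a chat-style endpoint; the surrounding handler wiring is assumed and not part of this diff.

from hypern.hypern import WebSocketSession
from hypern.ws.room import RoomManager

rooms = RoomManager()

def handle_join(session: WebSocketSession, room_name: str):
    rooms.join_room(session, room_name)

def handle_message(session: WebSocketSession, room_name: str, text: str):
    room = rooms.get_room(room_name)
    if room:
        # send to everyone else in the room
        room.broadcast(text, exclude=session)

def handle_disconnect(session: WebSocketSession):
    rooms.leave_all_rooms(session)
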
hypern/ws/route.py
ADDED
@@ -0,0 +1,26 @@
+from typing import Callable, Optional
+
+from hypern.hypern import WebsocketRoute as WebsocketRouteInternal, WebSocketSession
+
+
+class WebsocketRoute:
+    def __init__(self) -> None:
+        self.routes = []
+        self._disconnect_handler: Optional[Callable] = None
+
+    def on(self, path):
+        def wrapper(func):
+            self.routes.append(WebsocketRouteInternal(path, func))
+            return func
+
+        return wrapper
+
+    def on_disconnect(self, func):
+        """Register a disconnect handler"""
+        self._disconnect_handler = func
+        return func
+
+    def handle_disconnect(self, session: WebSocketSession):
+        """Internal method to handle disconnection"""
+        if self._disconnect_handler:
+            return self._disconnect_handler(session)

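
A minimal sketch of the decorator-based registration this module provides; the resulting object is what `Hypern(websockets=[...])` or `app.add_websocket(...)` consumes.

from hypern.hypern import WebSocketSession
from hypern.ws.route import WebsocketRoute

ws = WebsocketRoute()

@ws.on("/chat")
def chat_handler(session: WebSocketSession):
    session.send("connected")

@ws.on_disconnect
def cleanup(session: WebSocketSession):
    # leave rooms, stop heartbeats, etc.
    pass
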
hypern/ws.py
ADDED
@@ -0,0 +1,16 @@
+from .hypern import WebsocketRoute as WebsocketRouteInternal, WebSocketSession
+
+
+class WebsocketRoute:
+    def __init__(self) -> None:
+        self.routes = []
+
+    def on(self, path):
+        def wrapper(func):
+            self.routes.append(WebsocketRouteInternal(path, func))
+            return func
+
+        return wrapper
+
+
+__all__ = ["WebsocketRoute", "WebSocketSession"]

{hypern-0.2.1.dist-info → hypern-0.3.1.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: hypern
-Version: 0.
+Version: 0.3.1
 Classifier: Programming Language :: Rust
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy

{hypern-0.2.1.dist-info → hypern-0.3.1.dist-info}/RECORD
CHANGED

@@ -1,26 +1,33 @@
-hypern-0.
-hypern-0.
-hypern-0.
-hypern/application.py,sha256=
+hypern-0.3.1.dist-info/METADATA,sha256=4Wi8nT7rpmfnz3LxOF1imQOsFLyOZIpz-YbmNkzy85I,3658
+hypern-0.3.1.dist-info/WHEEL,sha256=9Ph2vvsZnBqwGITCasHTEa65w59hPsnpJHGUB2wTGtA,125
+hypern-0.3.1.dist-info/licenses/LICENSE,sha256=VdbaK2hSaaD-LUjtDIlEbeZVmvLGK7BEQvltP3mv-cY,1304
+hypern/application.py,sha256=yXrKhVKwCQDweHoZPFyWr0ws7nmCOPp6H3aLZLbD7sw,13923
+hypern/middleware/security.py,sha256=d_cfu58lUeVY7qk6O4GNvM1qIj64IO0Jd9DjcWB899s,7094
 hypern/middleware/__init__.py,sha256=YpgxL7GQkzZM91VCNxHT2xmTa1R1b_BUS8n3tZ2b1Ys,268
 hypern/middleware/cors.py,sha256=SfG-3vAS-4MPXqsIsegNwDx9mqC9lvgUc3RuYzN6HNg,1643
 hypern/middleware/base.py,sha256=Llcg9wglcumvY4BqaTfrX1OOZDqns4wb34wGF55EXcI,523
-hypern/middleware/limit.py,sha256=
+hypern/middleware/limit.py,sha256=9EA79q2GgyZkRynMJj8rfgumEhJKbAvyi3jII6A_BX8,7976
 hypern/middleware/i18n.py,sha256=s82nQo6kKClZ0s3G3jsy87VRfwxpBDbASB_ErjRL3O0,15
 hypern/routing/dispatcher.py,sha256=aujogCVTz2mYtZRkEtmpdlxXA9l6X4D072qOiIg-a_Q,2301
 hypern/routing/__init__.py,sha256=MtyPYRHYMWIiCReZsUjJH93PvluotCbPU3RnWFQQmrA,97
-hypern/routing/route.py,sha256=
+hypern/routing/route.py,sha256=AZc4Qo5iy74q0_U8E5X6RIsudznHZYEZR8MdHRoCmB4,10119
 hypern/routing/endpoint.py,sha256=AWLHLQNlSGR8IGU6xM0RP-1kP06OJQzqpbXKSiZEzEo,996
 hypern/routing/parser.py,sha256=4BFn8MAmSX1QplwBXEEgbabYiNUAllYf2svPZoPPD5k,3454
 hypern/response/__init__.py,sha256=9z99BDgASpG404GK8LGkOsXgac0wFwH_cQOTI5Ju-1U,223
 hypern/response/response.py,sha256=s6KqscjA7jl8RaZh5gZQgVksPtHKzsRrQywDcEjVSR4,4448
+hypern/ws/channel.py,sha256=TWaqowshz3WZRdg7ApBdFtVAlZW0OsVqoOHoFtXaZrk,3103
+hypern/ws/__init__.py,sha256=iUYERiHxs7HCmHj7CS5iG2XewKAJgW7w8kqxSORu_N8,127
+hypern/ws/room.py,sha256=9u6gLq1WY4hn9_yEniavaY0yetFbQzgya_g6VPN-cgg,2522
+hypern/ws/route.py,sha256=8YPTf1fsF46oCyqoXePC3mEbhjNVFDp8rqyWSsBHhBA,777
+hypern/ws/heartbeat.py,sha256=PIrYWnQn8VxQjfDXDmtMymMl-wN5MQ_Q6oHfAjPWznU,2787
 hypern/i18n/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-hypern/
+hypern/ws.py,sha256=XImBSYW2SM9s8wprnis8c2uRPhYxIXuDhwgRLLLDU38,378
+hypern/processpool.py,sha256=mtP6qYSKTUgHOQEqtw6yhBvZ4JMAPw6uLxuyd-96CYI,3908
 hypern/security.py,sha256=dOWdNHA8SoUhlbu6Z2MapOwT9tAhlsierrTBBiEV5-A,1729
-hypern/hypern.pyi,sha256=
+hypern/hypern.pyi,sha256=u0hRm3xmoecGvxb8G3oCLGROa9NAJi_C16CypEKlmPk,7735
 hypern/logging/logger.py,sha256=62Qg4YAi_JDGV72Rd6R58jixqZk7anRqHbtnuBlkrwA,3174
 hypern/logging/__init__.py,sha256=lzYSz0382eIM3CvP0sZ6RbEEwYZwfeJEJh9cxQA6Rws,49
-hypern/reload.py,sha256=
+hypern/reload.py,sha256=Y2pjHHh8Zv0z-pRkBPLezKiowRblb1ZJ7yI_oE55D4U,1523
 hypern/exceptions.py,sha256=wpTSTzw32Sb6bY9YxCDM7W_-Ww6u6pB1GKNbFf-1oj0,2331
 hypern/openapi/schemas.py,sha256=nmcmNYvKmjNkwFqi_3qpXVi1ukanNxMVay68bOLTrx8,1624
 hypern/openapi/__init__.py,sha256=oJ0HM9yAgSN00mBC_fRgV2irlGugrhvIpiveuDMv8PM,136
@@ -38,6 +45,7 @@ hypern/caching/base/__init__.py,sha256=8S6QSax1FVGzfve_fYIzPmhrpvxXG8WYoc7M9v25b
 hypern/caching/base/backend.py,sha256=RHOq9Y_FzTlYHKpTqAPxUYdJDOSYBXx-mxaDitYtP70,65
 hypern/caching/redis_backend.py,sha256=3FYzKCW0_OvoIMl-e9pARRGOUvRYGG7hGlaXEB18vnY,67
 hypern/caching/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+hypern/caching/strategies.py,sha256=CNpcwN719ETqxzkWWX-tQvVb4j2xlGrI667QEca3mJM,4084
 hypern/caching/custom_key_maker.py,sha256=DxJv1RV--5IdFCCFYawVExwMQ097hZ5V6_nHDYIQIZI,383
 hypern/caching/cache_manager.py,sha256=EBx89xNj38bYpQ9jf4MoQ3zNkfoCGYInxvvPtzsW5Xo,1997
 hypern/args_parser.py,sha256=kJQtzw2xZrumDBzLQaZyEejAT02rUetPYCrmOpjzxWY,1731
@@ -64,5 +72,5 @@ hypern/db/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 hypern/datastructures.py,sha256=7Nb_fOxmfO8CT7_v_-RhmXg54IhioXGZSp405IzJLh4,857
 hypern/config.py,sha256=v9KLL6snReAETKiVb8x3KOFrXpYd8-Io5tM7eruR85U,4781
 hypern/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-hypern/hypern.cpython-310-i386-linux-gnu.so,sha256=
-hypern-0.
+hypern/hypern.cpython-310-i386-linux-gnu.so,sha256=3ZgjchXZFVSDmWvCSuALo7zfyCZJvX0JEVDW28rpxFI,6504596
+hypern-0.3.1.dist-info/RECORD,,

{hypern-0.2.1.dist-info → hypern-0.3.1.dist-info}/licenses/LICENSE
File without changes