hypern 0.3.11__cp312-cp312-musllinux_1_2_i686.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hypern/__init__.py +24 -0
- hypern/application.py +495 -0
- hypern/args_parser.py +73 -0
- hypern/auth/__init__.py +0 -0
- hypern/auth/authorization.py +2 -0
- hypern/background.py +4 -0
- hypern/caching/__init__.py +6 -0
- hypern/caching/backend.py +31 -0
- hypern/caching/redis_backend.py +201 -0
- hypern/caching/strategies.py +208 -0
- hypern/cli/__init__.py +0 -0
- hypern/cli/commands.py +0 -0
- hypern/config.py +246 -0
- hypern/database/__init__.py +0 -0
- hypern/database/sqlalchemy/__init__.py +4 -0
- hypern/database/sqlalchemy/config.py +66 -0
- hypern/database/sqlalchemy/repository.py +290 -0
- hypern/database/sqlx/__init__.py +36 -0
- hypern/database/sqlx/field.py +246 -0
- hypern/database/sqlx/migrate.py +263 -0
- hypern/database/sqlx/model.py +117 -0
- hypern/database/sqlx/query.py +904 -0
- hypern/datastructures.py +40 -0
- hypern/enum.py +13 -0
- hypern/exceptions/__init__.py +34 -0
- hypern/exceptions/base.py +62 -0
- hypern/exceptions/common.py +12 -0
- hypern/exceptions/errors.py +15 -0
- hypern/exceptions/formatters.py +56 -0
- hypern/exceptions/http.py +76 -0
- hypern/gateway/__init__.py +6 -0
- hypern/gateway/aggregator.py +32 -0
- hypern/gateway/gateway.py +41 -0
- hypern/gateway/proxy.py +60 -0
- hypern/gateway/service.py +52 -0
- hypern/hypern.cpython-312-i386-linux-musl.so +0 -0
- hypern/hypern.pyi +333 -0
- hypern/i18n/__init__.py +0 -0
- hypern/logging/__init__.py +3 -0
- hypern/logging/logger.py +82 -0
- hypern/middleware/__init__.py +17 -0
- hypern/middleware/base.py +13 -0
- hypern/middleware/cache.py +177 -0
- hypern/middleware/compress.py +78 -0
- hypern/middleware/cors.py +41 -0
- hypern/middleware/i18n.py +1 -0
- hypern/middleware/limit.py +177 -0
- hypern/middleware/security.py +184 -0
- hypern/openapi/__init__.py +5 -0
- hypern/openapi/schemas.py +51 -0
- hypern/openapi/swagger.py +3 -0
- hypern/processpool.py +139 -0
- hypern/py.typed +0 -0
- hypern/reload.py +46 -0
- hypern/response/__init__.py +3 -0
- hypern/response/response.py +142 -0
- hypern/routing/__init__.py +5 -0
- hypern/routing/dispatcher.py +70 -0
- hypern/routing/endpoint.py +30 -0
- hypern/routing/parser.py +98 -0
- hypern/routing/queue.py +175 -0
- hypern/routing/route.py +280 -0
- hypern/scheduler.py +5 -0
- hypern/worker.py +274 -0
- hypern/ws/__init__.py +4 -0
- hypern/ws/channel.py +80 -0
- hypern/ws/heartbeat.py +74 -0
- hypern/ws/room.py +76 -0
- hypern/ws/route.py +26 -0
- hypern-0.3.11.dist-info/METADATA +134 -0
- hypern-0.3.11.dist-info/RECORD +74 -0
- hypern-0.3.11.dist-info/WHEEL +4 -0
- hypern-0.3.11.dist-info/licenses/LICENSE +24 -0
- hypern.libs/libgcc_s-b5472b99.so.1 +0 -0
hypern/hypern.pyi
ADDED
@@ -0,0 +1,333 @@
|
|
1
|
+
from __future__ import annotations

from dataclasses import dataclass, field
from enum import Enum
from typing import Any, Callable, Dict, List, Tuple
|
6
|
+
|
7
|
+
@dataclass
class BaseSchemaGenerator:
    """Stub for the OpenAPI schema generator exposed by the compiled extension."""

    # Callback that strips converter markup from a route path string.
    remove_converter: Callable[[str], str]
    # Callback that extracts documentation text from a handler's docstring.
    parse_docstring: Callable[..., str]


@dataclass
class SwaggerUI:
    """Stub for the bundled Swagger UI page renderer."""

    # Page title shown by the Swagger UI.
    title: str
    # URL the UI fetches the OpenAPI document from.
    openapi_url: str

    def get_html_content(self) -> str:
        """Return the HTML page serving Swagger UI against ``openapi_url``."""
        ...
|
18
|
+
|
19
|
+
@dataclass
class BackgroundTask:
    """
    A task to be executed in the background.

    id: str: The task ID
    function: Callable[..., Any]: The function to be executed
    args: List | Tuple: The arguments to be passed to the function
    kwargs: Dict[str, Any]: The keyword arguments to be passed to the function
    timeout_secs: int: The maximum time in seconds the task is allowed to run
    cancelled: bool: Whether the task is cancelled

    **Note**: function is currently run in sync mode, so it should be a sync function.
    """

    id: str
    function: Callable[..., Any]
    args: List | Tuple
    kwargs: Dict[str, Any]
    timeout_secs: int
    cancelled: bool

    def get_id(self) -> str:
        """
        Get the task ID.
        """
        pass

    def cancel(self) -> None:
        """
        Cancel the task.
        """
        pass

    def is_cancelled(self) -> bool:
        """
        Check if the task is cancelled.
        """
        pass

    def execute(self) -> Any:
        """
        Execute the task and return the function's result.
        """
        pass
|
63
|
+
|
64
|
+
@dataclass
class BackgroundTasks:
    """
    A collection of tasks to be executed in the background.

    **Note**: Only set tasks. pool, sender, receiver are set by the framework.
    """

    def add_task(self, task: BackgroundTask) -> str:
        """
        Add a task to the collection; returns the task's ID.
        """
        pass

    def cancel_task(self, task_id: str) -> bool:
        """
        Cancel a task in the collection; returns whether a task was cancelled.
        """
        pass

    def execute_all(self) -> None:
        """
        Execute all tasks in the collection.
        """
        pass

    def execute_task(self, task_id: str) -> None:
        """
        Execute a single task in the collection by its ID.
        """
        pass
|
95
|
+
|
96
|
+
class Scheduler:
    """Stub for the Rust job scheduler exposed by the compiled extension."""

    def add_job(
        self,
        job_type: str,
        schedule_param: str,
        task: Callable[..., Any],
        timezone: str,
        dependencies: List[str],
        retry_policy: Tuple[int, int, bool] | None = None,
    ) -> str:
        """
        Add a job to the scheduler.

        params:
            job_type: str: The type of the job (e.g. "cron", "interval")

            schedule_param: str: The schedule parameter of the job. Interval in
                seconds for interval jobs, cron expression for cron jobs.

                Example:
                    // sec  min  hour  day of month  month  day of week  year
                    expression = "0   30   9,12,15     1,15       May-Aug  Mon,Wed,Fri  2018/2";

            task: Callable[..., Any]: The task to be executed

            timezone: str: The timezone of the job

            dependencies: List[str]: The IDs of the jobs this job depends on

            retry_policy: Tuple[int, int, bool] | None: The retry policy of the job.
                (max_retries, retry_delay_secs, exponential_backoff)

        return:
            str: The ID of the job
        """
        pass

    def remove_job(self, job_id: str) -> None:
        """
        Remove a job from the scheduler.
        """
        pass

    def start(self) -> None:
        """
        Start the scheduler.
        """
        pass

    def stop(self) -> None:
        """
        Stop the scheduler.
        """
        pass

    def get_job_status(self, job_id: str) -> Tuple[float, float, List[str], int]:
        """
        Get the status of a job.

        NOTE(review): the meaning of the tuple fields is not documented here —
        confirm against the Rust implementation before relying on positions.
        """
        pass

    def get_next_run(self, job_id: str) -> float:
        """
        Get the next run time of a job (as a timestamp).
        """
        pass
|
159
|
+
|
160
|
+
@dataclass
class FunctionInfo:
    """
    The function info object passed to the route handler.

    Attributes:
        handler (Callable): The function to be called
        is_async (bool): Whether the function is async or not
    """

    handler: Callable
    is_async: bool
|
172
|
+
|
173
|
+
@dataclass
class Server:
    """Stub for the Rust HTTP server object exposed by the compiled extension."""

    router: Router
    websocket_router: Any
    startup_handler: Any
    shutdown_handler: Any

    def add_route(self, route: Route) -> None: ...
    def set_router(self, router: Router) -> None: ...
    def set_websocket_router(self, websocket_router: WebsocketRouter) -> None: ...
    # Serves on ``socket`` with ``worker`` workers; ``max_blocking_threads``
    # presumably caps the runtime's blocking thread pool — confirm in Rust.
    def start(self, socket: SocketHeld, worker: int, max_blocking_threads: int) -> None: ...
    # Hooks run before/after every request handler.
    def set_before_hooks(self, hooks: List[FunctionInfo]) -> None: ...
    def set_after_hooks(self, hooks: List[FunctionInfo]) -> None: ...
    # Headers added to every response.
    def set_response_headers(self, headers: Dict[str, str]) -> None: ...
    def set_startup_handler(self, on_startup: FunctionInfo) -> None: ...
    def set_shutdown_handler(self, on_shutdown: FunctionInfo) -> None: ...
    def set_database_config(self, config: DatabaseConfig) -> None: ...
    # Values made available to handlers via dependency injection.
    def set_dependencies(self, dependencies: Dict[str, Any]) -> None: ...
    def enable_http2(self) -> None: ...
|
192
|
+
|
193
|
+
class Route:
    """Stub for a single HTTP route registered with the Rust router."""

    path: str
    function: FunctionInfo
    method: str
    doc: str | None = None

    # Returns a match result string for ``path``/``method`` (semantics defined in Rust).
    def matches(self, path: str, method: str) -> str: ...
    def clone_route(self) -> Route: ...
    def update_path(self, new_path: str) -> None: ...
    def update_method(self, new_method: str) -> None: ...
    def is_valid(self) -> bool: ...
    # NOTE(review): name is a typo of ``get_path_params`` but mirrors the
    # compiled extension's exported symbol — do not rename here alone.
    def get_path_parans(self) -> List[str]: ...
    def has_parameters(self) -> bool: ...
    def normalized_path(self) -> str: ...
    def same_handler(self, other: Route) -> bool: ...
|
208
|
+
|
209
|
+
class Router:
    """Stub for the Rust HTTP route table."""

    routes: List[Route]

    def add_route(self, route: Route) -> None: ...
    # Returns whether a matching route was removed.
    def remove_route(self, path: str, method: str) -> bool: ...
    def get_route(self, path: str, method: str) -> Route | None: ...
    def get_routes_by_path(self, path: str) -> List[Route]: ...
    def get_routes_by_method(self, method: str) -> List[Route]: ...
    def extend_route(self, routes: List[Route]) -> None: ...
|
218
|
+
|
219
|
+
@dataclass
class SocketHeld:
    """Stub wrapping an OS socket handed to ``Server.start``."""

    socket: Any


@dataclass
class WebSocketSession:
    """Stub for one live websocket connection."""

    # Sends a text frame to the peer.
    sender: Callable[[str], None]
    # Blocks until the next text frame from the peer.
    receiver: Callable[[], str]
    is_closed: bool

    def send(self, message: str) -> None: ...


@dataclass
class WebsocketRoute:
    """Stub pairing a websocket path with its connection handler."""

    path: str
    handler: Callable[[WebSocketSession], None]


@dataclass
class WebsocketRouter:
    """Stub for the Rust websocket route table."""

    path: str
    routes: List[WebsocketRoute]

    def add_route(self, route: WebsocketRoute) -> None: ...
    def remove_route(self, path: str) -> None: ...
    def extend_route(self, route: WebsocketRoute) -> None: ...
    def clear_routes(self) -> None: ...
    def route_count(self) -> int: ...
|
246
|
+
|
247
|
+
@dataclass
class Header:
    """Stub for the HTTP header map used on requests and responses.

    NOTE(review): only the methods below are exposed — no ``__getitem__``,
    so use ``get``/``set`` rather than subscripting.
    """

    headers: Dict[str, str]

    def get(self, key: str) -> str | None: ...
    def set(self, key: str, value: str) -> None: ...
    # Appends to an existing header value rather than replacing it.
    def append(self, key: str, value: str) -> None: ...
    # Bulk-merges ``headers`` into this map.
    def update(self, headers: Dict[str, str]) -> None: ...
    def get_headers(self) -> Dict[str, str]: ...


@dataclass
class Response:
    """Stub for the HTTP response object produced by handlers."""

    status_code: int
    # e.g. the content type/kind of the body (semantics defined in Rust).
    response_type: str
    headers: Header
    # Response body payload (see EdgeCacheMiddleware, which hashes it for ETags).
    description: str
    # Set when serving a file from disk instead of ``description``.
    file_path: str | None
    # Correlates the response with its originating request context.
    context_id: str
|
265
|
+
|
266
|
+
@dataclass
class QueryParams:
    """Stub for parsed URL query parameters (a key may repeat)."""

    queries: Dict[str, List[str]]

    # Flattens to a single value per key (which repeated value wins is defined in Rust).
    def to_dict(self) -> Dict[str, str]: ...


@dataclass
class UploadedFile:
    """Stub for one file received in a multipart upload."""

    # Form-field name of the upload.
    name: str
    content_type: str
    # Server-side temporary path the file was spooled to.
    path: str
    # Size in bytes.
    size: int
    content: bytes
    # Original filename supplied by the client.
    file_name: str


@dataclass
class BodyData:
    """Stub for a parsed request body: raw JSON bytes plus any uploads."""

    json: bytes
    files: List[UploadedFile]
|
285
|
+
|
286
|
+
@dataclass
class Request:
    """Stub for the incoming HTTP request object passed to handlers."""

    path: str
    query_params: QueryParams
    headers: Header
    # Values captured from path placeholders, e.g. ``/users/{id}``.
    path_params: Dict[str, str]
    body: BodyData
    method: str
    remote_addr: str
    # Arrival time as a timestamp.
    timestamp: float
    # Correlates this request with its response and per-request state.
    context_id: str

    # Parses the body as JSON and returns it as a dict.
    def json(self) -> Dict[str, Any]: ...
    def set_body(self, body: BodyData) -> None: ...
|
300
|
+
|
301
|
+
@dataclass
class MiddlewareConfig:
    """Execution settings for a middleware.

    Attributes:
        priority: Ordering weight among registered middlewares. Default 0.
        is_conditional: Whether the middleware may be skipped conditionally.
            Default True.
    """

    priority: int = 0
    is_conditional: bool = True

    @staticmethod
    def default() -> MiddlewareConfig:
        """Return the framework's default configuration.

        Fix: the previous stub declared ``default(self)`` under
        ``@staticmethod``, which contradicts the actual zero-argument call
        site ``MiddlewareConfig.default()`` in ``hypern/middleware/base.py``.
        """
        ...
|
308
|
+
|
309
|
+
class DatabaseType(Enum):
    """Supported database drivers for the sqlx layer."""

    Postgres: str
    MySQL: str
    SQLite: str
|
313
|
+
|
314
|
+
@dataclass
class DatabaseConfig:
    """Connection-pool settings for the built-in database layer.

    Attributes:
        driver: Which database backend to connect to.
        url: Connection URL/DSN.
        max_connections: Pool upper bound. Default 10.
        min_connections: Pool lower bound. Default 1.
        idle_timeout: Seconds before an idle connection is reaped. Default 30.
        options: Driver-specific extra options. Defaults to an empty dict.
    """

    driver: DatabaseType
    url: str
    max_connections: int = 10
    min_connections: int = 1
    idle_timeout: int = 30

    # Fix: a literal ``{}`` default on a dataclass field raises
    # ``ValueError: mutable default`` the moment the class body executes;
    # ``default_factory`` gives each instance its own fresh dict.
    options: Dict[str, Any] = field(default_factory=dict)
|
323
|
+
|
324
|
+
@dataclass
class DatabaseTransaction:
    """Stub for a database transaction handle exposed by the compiled extension."""

    # Returns an int — presumably the affected-row count; confirm in Rust.
    def execute(self, query: str, params: List[Any]) -> int: ...
    def fetch_all(self, query: str, params: List[Any]) -> List[Dict[str, Any]]: ...
    # Streams rows in chunks of ``chunk_size`` (exact payload shape defined in Rust).
    def stream_data(self, query: str, params: List[Any], chunk_size: int) -> Dict[str, Any]: ...
    # Runs ``query`` once per parameter set, batched by ``batch_size``.
    def bulk_change(self, query: str, params: List[List[Any]], batch_size: int) -> int | None: ...
    def commit(self) -> None: ...
    def rollback(self) -> None: ...


# Returns the transaction bound to the request context identified by ``context_id``.
def get_session_database(context_id: str) -> DatabaseTransaction: ...
|
hypern/i18n/__init__.py
ADDED
File without changes
|
hypern/logging/logger.py
ADDED
@@ -0,0 +1,82 @@
|
|
1
|
+
# -*- coding: utf-8 -*-
|
2
|
+
import logging
|
3
|
+
import sys
|
4
|
+
from copy import copy
|
5
|
+
from datetime import datetime, timezone
|
6
|
+
from typing import Literal, Optional
|
7
|
+
|
8
|
+
import click
|
9
|
+
|
10
|
+
# Custom level below logging.DEBUG (10) for very verbose trace output.
TRACE_LOG_LEVEL = 5


class ColourizedFormatter(logging.Formatter):
    """
    Logging formatter that colourizes the level name, process id, timestamp
    and source location via ``click.style``.

    The colour decision is made once per instance: an explicit ``use_colors``
    argument wins; otherwise ``should_use_colors()`` is consulted, which
    subclasses override to probe the stream they actually write to.
    """

    # Numeric log level -> callable that colours its level name.
    level_name_colors = {
        TRACE_LOG_LEVEL: lambda level_name: click.style(str(level_name), fg="blue"),
        logging.DEBUG: lambda level_name: click.style(str(level_name), fg="cyan"),
        logging.INFO: lambda level_name: click.style(str(level_name), fg="green"),
        logging.WARNING: lambda level_name: click.style(str(level_name), fg="yellow"),
        logging.ERROR: lambda level_name: click.style(str(level_name), fg="red"),
        logging.CRITICAL: lambda level_name: click.style(str(level_name), fg="bright_red"),
    }

    def __init__(
        self,
        fmt: Optional[str] = None,
        datefmt: Optional[str] = None,
        style: Literal["%", "{", "$"] = "%",
        use_colors: Optional[bool] = None,
    ):
        if use_colors in (True, False):
            self.use_colors = use_colors
        else:
            # Fix: consult the overridable hook instead of probing stdout
            # inline — previously ``should_use_colors`` was never called, so
            # the DefaultFormatter override (stderr probe) was dead code.
            self.use_colors = self.should_use_colors()
        super().__init__(fmt=fmt, datefmt=datefmt, style=style)

    def color_level_name(self, level_name: str, level_no: int) -> str:
        """Return *level_name* coloured for *level_no*; unknown levels pass through."""

        def default(level_name: str) -> str:
            return str(level_name)

        func = self.level_name_colors.get(level_no, default)
        return func(level_name)

    def should_use_colors(self) -> bool:
        # Base heuristic: colour only when stdout is a terminal.
        return sys.stdout.isatty()

    def formatMessage(self, record: logging.LogRecord) -> str:
        # Mutate a copy so the extra fields never leak into other handlers.
        recordcopy = copy(record)
        levelname = recordcopy.levelname
        process = recordcopy.process
        created = recordcopy.created
        filename = recordcopy.filename
        module = recordcopy.module
        lineno = recordcopy.lineno
        # Pad so messages line up regardless of level-name length.
        separator = " " * (5 - len(recordcopy.levelname))
        if self.use_colors:
            levelname = self.color_level_name(levelname, recordcopy.levelno)
            if "color_message" in recordcopy.__dict__:
                recordcopy.msg = recordcopy.__dict__["color_message"]
                recordcopy.__dict__["message"] = recordcopy.getMessage()
        recordcopy.__dict__["levelprefix"] = levelname + separator
        recordcopy.__dict__["process"] = click.style(str(process), fg="blue")
        recordcopy.__dict__["asctime"] = click.style(
            datetime.fromtimestamp(created, tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
            fg=(101, 111, 104),
        )
        # Fix: emit the actual source file name — the previous literal
        # "(unknown)" ignored the ``filename`` captured (and otherwise unused)
        # above.
        recordcopy.__dict__["filename"] = click.style(f"{module}/{filename}:{lineno}:", fg=(101, 111, 104))
        return super().formatMessage(recordcopy)
|
65
|
+
|
66
|
+
|
67
|
+
class DefaultFormatter(ColourizedFormatter):
    """Formatter used by ``create_logger``; colours when stderr is a TTY."""

    def should_use_colors(self) -> bool:
        # StreamHandler writes to stderr by default, so probe stderr here.
        return sys.stderr.isatty()
|
70
|
+
|
71
|
+
|
72
|
+
def create_logger(name) -> logging.Logger:
    """
    Return a DEBUG-level logger named *name* with the hypern colourized
    console format attached.

    Fix: the handler is only attached when the logger has none yet, so
    calling this twice for the same name no longer duplicates every log line.
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    if not logger.handlers:
        formatter = DefaultFormatter(
            fmt="%(asctime)s %(levelprefix)s %(filename)s %(message)s",
            use_colors=True,
            datefmt="%Y-%m-%d %H:%M:%S",
        )
        handler = logging.StreamHandler()
        handler.setFormatter(formatter)
        logger.addHandler(handler)
    return logger


# Shared package-wide logger instance.
logger = create_logger("hypern")
|
@@ -0,0 +1,17 @@
|
|
1
|
+
"""Public API of :mod:`hypern.middleware` — re-exports the built-in middlewares."""

from .base import Middleware, MiddlewareConfig
from .cors import CORSMiddleware
from .limit import RateLimitMiddleware, StorageBackend, RedisBackend, InMemoryBackend
from .compress import CompressionMiddleware
from .cache import EdgeCacheMiddleware

# Names importable via ``from hypern.middleware import *``.
__all__ = [
    "Middleware",
    "CORSMiddleware",
    "RateLimitMiddleware",
    "StorageBackend",
    "RedisBackend",
    "InMemoryBackend",
    "CompressionMiddleware",
    "EdgeCacheMiddleware",
    "MiddlewareConfig",
]
|
@@ -0,0 +1,13 @@
|
|
1
|
+
from typing import Optional
|
2
|
+
from hypern.hypern import MiddlewareConfig
|
3
|
+
|
4
|
+
|
5
|
+
class Middleware:
    """Base class for HTTP middlewares.

    Holds the middleware's :class:`MiddlewareConfig` and provides pass-through
    request/response hooks for subclasses to override.
    """

    def __init__(self, config: Optional[MiddlewareConfig] = None):
        # Fall back to the framework default when no config is supplied.
        self.config = config or MiddlewareConfig.default()

    async def before_request(self, request):
        """Hook run before the handler; returns the (possibly modified) request."""
        return request

    async def after_request(self, response):
        """Hook run after the handler; returns the (possibly modified) response."""
        return response
|
@@ -0,0 +1,177 @@
|
|
1
|
+
import hashlib
|
2
|
+
from datetime import datetime, timezone
|
3
|
+
from typing import Dict, List, Optional
|
4
|
+
|
5
|
+
from hypern.hypern import Header, MiddlewareConfig, Request, Response
|
6
|
+
|
7
|
+
from .base import Middleware
|
8
|
+
|
9
|
+
|
10
|
+
class CacheConfig:
    """
    Configuration class for caching middleware.

    Attributes:
        max_age (int): The maximum age (in seconds) for the cache. Default is 3600 seconds (1 hour).
        s_maxage (Optional[int]): The shared maximum age (in seconds) for the cache. Default is None.
        stale_while_revalidate (Optional[int]): The time (in seconds) the cache can be used while revalidation is performed. Default is None.
        stale_if_error (Optional[int]): The time (in seconds) the cache can be used if an error occurs during revalidation. Default is None.
        vary_by (List[str]): List of headers to vary the cache by. Default is ``["accept", "accept-encoding"]``.
        cache_control (List[str]): List of cache control directives. Default is an empty list.
        include_query_string (bool): Whether to include the query string in the cache key. Default is True.
        exclude_paths (List[str]): List of paths to exclude from caching. Default is ``["/admin", "/api/private"]``.
        exclude_methods (List[str]): List of HTTP methods to exclude from caching. Default is ``["POST", "PUT", "DELETE", "PATCH"]``.
        private_paths (List[str]): List of paths to be marked as private. Default is an empty list.
        cache_by_headers (List[str]): List of headers to include in the cache key. Default is an empty list.
    """

    def __init__(
        self,
        max_age: int = 3600,  # 1 hour default
        s_maxage: Optional[int] = None,
        stale_while_revalidate: Optional[int] = None,
        stale_if_error: Optional[int] = None,
        # Fix: list parameters defaulting to None are Optional — the previous
        # bare ``List[str] = None`` annotations were untruthful.
        vary_by: Optional[List[str]] = None,
        cache_control: Optional[List[str]] = None,
        include_query_string: bool = True,
        exclude_paths: Optional[List[str]] = None,
        exclude_methods: Optional[List[str]] = None,
        private_paths: Optional[List[str]] = None,
        cache_by_headers: Optional[List[str]] = None,
    ):
        self.max_age = max_age
        self.s_maxage = s_maxage
        self.stale_while_revalidate = stale_while_revalidate
        self.stale_if_error = stale_if_error
        # ``or`` maps None to a fresh per-instance list (no shared mutable default).
        self.vary_by = vary_by or ["accept", "accept-encoding"]
        self.cache_control = cache_control or []
        self.include_query_string = include_query_string
        self.exclude_paths = exclude_paths or ["/admin", "/api/private"]
        self.exclude_methods = exclude_methods or ["POST", "PUT", "DELETE", "PATCH"]
        self.private_paths = private_paths or []
        self.cache_by_headers = cache_by_headers or []
|
53
|
+
|
54
|
+
|
55
|
+
class EdgeCacheMiddleware(Middleware):
    """
    Middleware implementing edge caching strategies with support for:
    - Cache-Control directives
    - ETag generation
    - Conditional requests (If-None-Match, If-Modified-Since)
    - Vary header management
    - CDN-specific headers

    NOTE(review): ``_etag_cache`` grows without bound — one entry per distinct
    cache key — consider an eviction policy for long-running processes.
    """

    def __init__(self, cache_config: CacheConfig | None = None, config: Optional[MiddlewareConfig] = None):
        super().__init__(config)
        self.cache_config = cache_config or CacheConfig()
        # cache_key -> last ETag generated for that key.
        self._etag_cache: Dict[str, str] = {}
        # context_id -> Request, carried from before_request to after_request.
        self.request_context = {}

    def _should_cache(self, request: Request, path: str) -> bool:
        """Determine if the request should be cached."""
        if request.method in self.cache_config.exclude_methods:
            return False
        if any(excluded in path for excluded in self.cache_config.exclude_paths):
            return False
        return True

    def _generate_cache_key(self, request: Request) -> str:
        """Generate a unique cache key based on request attributes."""
        components = [request.method, request.path]
        if self.cache_config.include_query_string:
            components.append(str(request.query_params))
        for header in self.cache_config.cache_by_headers:
            value = request.headers.get(str(header).lower())
            if value:
                components.append(f"{header}:{value}")
        return hashlib.sha256(":".join(components).encode()).hexdigest()

    def _generate_etag(self, response: Response) -> str:
        """Generate ETag for response content (the ``description`` payload)."""
        content = response.description
        if not isinstance(content, bytes):
            content = str(content).encode()
        return hashlib.sha256(content).hexdigest()

    def _build_cache_control(self, path: str) -> str:
        """Build the Cache-Control header value for *path*."""
        directives = []
        # Determine public/private caching.
        if any(private in path for private in self.cache_config.private_paths):
            directives.append("private")
        else:
            directives.append("public")
        # Add max-age directives.
        directives.append(f"max-age={self.cache_config.max_age}")
        if self.cache_config.s_maxage is not None:
            directives.append(f"s-maxage={self.cache_config.s_maxage}")
        if self.cache_config.stale_while_revalidate is not None:
            directives.append(f"stale-while-revalidate={self.cache_config.stale_while_revalidate}")
        if self.cache_config.stale_if_error is not None:
            directives.append(f"stale-if-error={self.cache_config.stale_if_error}")
        # Add custom cache control directives.
        directives.extend(self.cache_config.cache_control)
        return ", ".join(directives)

    def cleanup_context(self, context_id: str):
        """Drop the stored request for *context_id*; missing ids are ignored."""
        # pop() with a default replaces the previous broad try/except around ``del``.
        self.request_context.pop(context_id, None)

    def before_request(self, request: Request) -> Request | Response:
        """Handle conditional requests; may short-circuit with a 304."""
        if not self._should_cache(request, request.path):
            return request

        cache_key = self._generate_cache_key(request)
        etag = self._etag_cache.get(cache_key)

        if etag:
            if_none_match = request.headers.get("if-none-match")
            if if_none_match and if_none_match == etag:
                return Response(status_code=304, description=b"", headers=Header({"ETag": etag}))
        self.request_context[request.context_id] = request
        return request

    def after_request(self, response: Response) -> Response:
        """Add caching headers to the response."""
        request = self.request_context.get(response.context_id)
        self.cleanup_context(response.context_id)
        # Fix: before_request may not have stored a request for this context
        # (excluded path/method, or a 304 short-circuit); previously a missing
        # entry crashed with AttributeError on ``request.method``.
        if request is None or not self._should_cache(request, request.path):
            response.headers.set("Cache-Control", "no-store")
            return response

        # Generate and store ETag.
        cache_key = self._generate_cache_key(request)
        etag = self._generate_etag(response)
        self._etag_cache[cache_key] = etag

        # Set cache headers.
        cache_control = self._build_cache_control(request.path)
        response.headers.update(
            {
                "Cache-Control": cache_control,
                "ETag": etag,
                "Vary": ", ".join(self.cache_config.vary_by),
                "Last-Modified": datetime.now(tz=timezone.utc).strftime("%a, %d %b %Y %H:%M:%S GMT"),
            }
        )

        # Add CDN-specific headers. Fix: reuse the computed value — the Header
        # stub exposes ``get``/``set`` but no ``__getitem__``, so the previous
        # ``response.headers["Cache-Control"]`` subscript was not supported.
        response.headers.set("CDN-Cache-Control", cache_control)
        response.headers.set("Surrogate-Control", f"max-age={self.cache_config.s_maxage or self.cache_config.max_age}")

        return response
|