hypern-0.3.0-cp312-cp312-win32.whl → hypern-0.3.2-cp312-cp312-win32.whl

This diff shows the changes between package versions as published to their public registry. It is provided for informational purposes only.
hypern/application.py CHANGED
@@ -8,7 +8,7 @@ import orjson
 from typing_extensions import Annotated, Doc
 
 from hypern.datastructures import Contact, HTTPMethod, Info, License
-from hypern.hypern import FunctionInfo, Router, Route as InternalRoute, WebsocketRouter
+from hypern.hypern import FunctionInfo, Router, Route as InternalRoute, WebsocketRouter, MiddlewareConfig
 from hypern.openapi import SchemaGenerator, SwaggerUI
 from hypern.processpool import run_processes
 from hypern.response import HTMLResponse, JSONResponse
@@ -17,6 +17,7 @@ from hypern.scheduler import Scheduler
 from hypern.middleware import Middleware
 from hypern.args_parser import ArgsConfig
 from hypern.ws import WebsocketRoute
+from hypern.logging import logger
 
 AppType = TypeVar("AppType", bound="Hypern")
 
@@ -190,6 +191,14 @@ class Hypern:
                 """
             ),
         ] = None,
+        auto_compression: Annotated[
+            bool,
+            Doc(
+                """
+                Enable automatic compression of responses.
+                """
+            ),
+        ] = False,
         *args: Any,
         **kwargs: Any,
     ) -> None:
@@ -202,6 +211,9 @@ class Hypern:
         self.middleware_after_request = []
         self.response_headers = {}
         self.args = ArgsConfig()
+        self.start_up_handler = None
+        self.shutdown_handler = None
+        self.auto_compression = auto_compression
 
         for route in routes or []:
             self.router.extend_route(route(app=self).routes)
@@ -287,10 +299,12 @@ class Hypern:
             function: The decorator function that registers the middleware.
         """
 
+        logger.warning("This function will be deprecated in version 0.4.0. Please use the middleware class instead.")
+
         def decorator(func):
             is_async = asyncio.iscoroutinefunction(func)
             func_info = FunctionInfo(handler=func, is_async=is_async)
-            self.middleware_before_request.append(func_info)
+            self.middleware_before_request.append((func_info, MiddlewareConfig.default()))
             return func
 
         return decorator
@@ -306,11 +320,12 @@ class Hypern:
         Returns:
             function: The decorator function that registers the given function.
         """
+        logger.warning("This function will be deprecated in version 0.4.0. Please use the middleware class instead.")
 
         def decorator(func):
             is_async = asyncio.iscoroutinefunction(func)
             func_info = FunctionInfo(handler=func, is_async=is_async)
-            self.middleware_after_request.append(func_info)
+            self.middleware_after_request.append((func_info, MiddlewareConfig.default()))
             return func
 
         return decorator
@@ -346,11 +361,13 @@ class Hypern:
         before_request = getattr(middleware, "before_request", None)
         after_request = getattr(middleware, "after_request", None)
 
-        if before_request:
-            self.before_request()(before_request)
-        if after_request:
-            self.after_request()(after_request)
-        return self
+        is_async = asyncio.iscoroutinefunction(before_request)
+        before_request = FunctionInfo(handler=before_request, is_async=is_async)
+        self.middleware_before_request.append((before_request, middleware.config))
+
+        is_async = asyncio.iscoroutinefunction(after_request)
+        after_request = FunctionInfo(handler=after_request, is_async=is_async)
+        self.middleware_after_request.append((after_request, middleware.config))
 
     def start(
         self,
@@ -377,6 +394,9 @@ class Hypern:
             after_request=self.middleware_after_request,
             response_headers=self.response_headers,
             reload=self.args.reload,
+            on_startup=self.start_up_handler,
+            on_shutdown=self.shutdown_handler,
+            auto_compression=self.args.auto_compression or self.auto_compression,
         )
 
     def add_route(self, method: HTTPMethod, endpoint: str, handler: Callable[..., Any]):
@@ -403,3 +423,22 @@ class Hypern:
         """
         for route in ws_route.routes:
             self.websocket_router.add_route(route=route)
+
+    def on_startup(self, handler: Callable[..., Any]):
+        """
+        Registers a function to be executed on application startup.
+
+        Args:
+            handler (Callable[..., Any]): The function to be executed on application startup.
+        """
+        # decorator
+        self.start_up_handler = FunctionInfo(handler=handler, is_async=asyncio.iscoroutinefunction(handler))
+
+    def on_shutdown(self, handler: Callable[..., Any]):
+        """
+        Registers a function to be executed on application shutdown.
+
+        Args:
+            handler (Callable[..., Any]): The function to be executed on application shutdown.
+        """
+        self.shutdown_handler = FunctionInfo(handler=handler, is_async=asyncio.iscoroutinefunction(handler))
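
Taken together, the application.py changes add lifecycle hooks, an `auto_compression` switch, and config-aware middleware registration. A minimal usage sketch, based only on the APIs visible in these hunks (handler names here are illustrative):

```python
# Sketch of the 0.3.2 surface added above; handler names are illustrative.
from hypern import Hypern

app = Hypern(auto_compression=True)  # new constructor flag


def open_resources():
    print("starting up")


async def close_resources():
    print("shutting down")


# Both hooks wrap the handler in a FunctionInfo and hand it to the process
# pool as on_startup/on_shutdown; sync and async handlers are accepted.
app.on_startup(open_resources)
app.on_shutdown(close_resources)


# The decorator still works in 0.3.2 but now logs a deprecation warning and
# stores (FunctionInfo, MiddlewareConfig.default()) instead of a bare FunctionInfo.
@app.before_request()
def tag_request(request):
    return request
```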
hypern/args_parser.py CHANGED
@@ -49,6 +49,12 @@ class ArgsConfig:
             action="store_true",
             help="It restarts the server based on file changes.",
         )
+
+        parser.add_argument(
+            "--auto-compression",
+            action="store_true",
+            help="It compresses the response automatically.",
+        )
         args, _ = parser.parse_known_args()
 
         self.host = args.host or "127.0.0.1"
@@ -57,3 +63,4 @@ class ArgsConfig:
         self.processes = args.processes or 1
         self.workers = args.workers or 1
         self.reload = args.reload or False
+        self.auto_compression = args.auto_compression
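
The CLI flag parallels the constructor argument: `start()` enables compression when either `args.auto_compression` (CLI) or `self.auto_compression` (constructor) is truthy. A quick sketch, assuming `start()` runs with its defaults and the script name is illustrative:

```python
# main.py -- run as:  python main.py --auto-compression
from hypern import Hypern

app = Hypern()                    # constructor flag left at its False default
# app.args.auto_compression is True when --auto-compression was passed;
# start() forwards (args.auto_compression or self.auto_compression) to the workers.
app.start()
```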
@@ -0,0 +1,6 @@
+from .backend import BaseBackend
+from .redis_backend import RedisBackend
+
+from .strategies import CacheAsideStrategy, CacheEntry, CacheStrategy, StaleWhileRevalidateStrategy, cache_with_strategy
+
+__all__ = ["BaseBackend", "RedisBackend", "CacheAsideStrategy", "CacheEntry", "CacheStrategy", "StaleWhileRevalidateStrategy", "cache_with_strategy"]
@@ -0,0 +1,31 @@
+from abc import ABC, abstractmethod
+from typing import Any, Optional
+
+
+class BaseBackend(ABC):
+    @abstractmethod
+    async def get(self, key: str) -> Any: ...
+
+    @abstractmethod
+    async def set(self, key: str, value: Any, ttl: Optional[int] = None) -> None: ...
+
+    @abstractmethod
+    async def delete_pattern(self, pattern: str) -> None: ...
+
+    @abstractmethod
+    async def delete(self, key: str) -> None: ...
+
+    @abstractmethod
+    async def exists(self, key: str) -> bool: ...
+
+    @abstractmethod
+    async def set_nx(self, key: str, value: Any, ttl: Optional[int] = None) -> bool: ...
+
+    @abstractmethod
+    async def ttl(self, key: str) -> int: ...
+
+    @abstractmethod
+    async def incr(self, key: str) -> int: ...
+
+    @abstractmethod
+    async def clear(self) -> None: ...
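
This ABC fixes the contract every cache backend must satisfy. As a concrete illustration (not part of the package), a dict-backed in-memory backend can implement it in a few dozen lines, with `fnmatch` standing in for Redis-style glob matching:

```python
# Illustrative in-memory implementation of the BaseBackend contract above.
# Not part of hypern; shown only to make the abstract interface concrete.
import fnmatch
import time
from typing import Any, Optional

from .backend import BaseBackend  # relative path as used elsewhere in this diff


class MemoryBackend(BaseBackend):
    def __init__(self) -> None:
        # key -> (value, absolute expiry timestamp or None)
        self._data: dict[str, tuple[Any, Optional[float]]] = {}

    def _live(self, key: str) -> bool:
        entry = self._data.get(key)
        if entry is None:
            return False
        if entry[1] is not None and time.time() > entry[1]:
            del self._data[key]  # lazily evict expired keys
            return False
        return True

    async def get(self, key: str) -> Any:
        return self._data[key][0] if self._live(key) else None

    async def set(self, key: str, value: Any, ttl: Optional[int] = None) -> None:
        self._data[key] = (value, time.time() + ttl if ttl else None)

    async def delete(self, key: str) -> None:
        self._data.pop(key, None)

    async def delete_pattern(self, pattern: str) -> None:
        for key in [k for k in self._data if fnmatch.fnmatch(k, pattern)]:
            del self._data[key]

    async def exists(self, key: str) -> bool:
        return self._live(key)

    async def set_nx(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
        if self._live(key):
            return False
        await self.set(key, value, ttl)
        return True

    async def ttl(self, key: str) -> int:
        if not self._live(key):
            return -2  # mirror Redis semantics: -2 missing, -1 no expiry
        expires_at = self._data[key][1]
        return -1 if expires_at is None else max(0, int(expires_at - time.time()))

    async def incr(self, key: str) -> int:
        value, expires_at = self._data[key] if self._live(key) else (0, None)
        self._data[key] = (value + 1, expires_at)
        return value + 1

    async def clear(self) -> None:
        self._data.clear()
```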
@@ -1,3 +1,201 @@
-from hypern.hypern import RedisBackend
+# src/hypern/cache/backends/redis.py
+import pickle
+from typing import Any, Optional
 
-__all__ = ["RedisBackend"]
+from redis import asyncio as aioredis
+
+from hypern.logging import logger
+
+from .backend import BaseBackend
+
+
+class RedisBackend(BaseBackend):
+    def __init__(self, url: str = "redis://localhost:6379", encoding: str = "utf-8", decode_responses: bool = False, **kwargs):
+        """
+        Initialize Redis backend with aioredis
+
+        Args:
+            url: Redis connection URL
+            encoding: Character encoding to use
+            decode_responses: Whether to decode response bytes to strings
+            **kwargs: Additional arguments passed to aioredis.from_url
+        """
+        self.redis = aioredis.from_url(url, encoding=encoding, decode_responses=decode_responses, **kwargs)
+        self._encoding = encoding
+
+    async def get(self, key: str) -> Optional[Any]:
+        """
+        Get value from Redis
+
+        Args:
+            key: Cache key
+
+        Returns:
+            Deserialized Python object or None if key doesn't exist
+        """
+        try:
+            value = await self.redis.get(key)
+            if value is not None:
+                return pickle.loads(value)
+            return None
+        except Exception as e:
+            logger.error(f"Error getting cache key {key}: {e}")
+            return None
+
+    async def set(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
+        """
+        Set value in Redis with optional TTL
+
+        Args:
+            key: Cache key
+            value: Python object to store
+            ttl: Time to live in seconds
+
+        Returns:
+            bool: True if successful, False otherwise
+        """
+        try:
+            serialized = pickle.dumps(value)
+            if ttl is not None:
+                await self.redis.setex(key, ttl, serialized)
+            else:
+                await self.redis.set(key, serialized)
+            return True
+        except Exception as e:
+            logger.error(f"Error setting cache key {key}: {e}")
+            return False
+
+    async def delete(self, key: str) -> bool:
+        """
+        Delete key from Redis
+
+        Args:
+            key: Cache key to delete
+
+        Returns:
+            bool: True if key was deleted, False otherwise
+        """
+        try:
+            return bool(await self.redis.delete(key))
+        except Exception as e:
+            logger.error(f"Error deleting cache key {key}: {e}")
+            return False
+
+    async def delete_pattern(self, pattern: str) -> int:
+        """
+        Delete all keys matching pattern
+
+        Args:
+            pattern: Redis key pattern to match
+
+        Returns:
+            int: Number of keys deleted
+        """
+        try:
+            keys = await self.redis.keys(pattern)
+            if keys:
+                return await self.redis.delete(*keys)
+            return 0
+        except Exception as e:
+            logger.error(f"Error deleting keys matching {pattern}: {e}")
+            return 0
+
+    async def exists(self, key: str) -> bool:
+        """
+        Check if key exists
+
+        Args:
+            key: Cache key to check
+
+        Returns:
+            bool: True if key exists, False otherwise
+        """
+        try:
+            return bool(await self.redis.exists(key))
+        except Exception as e:
+            logger.error(f"Error checking existence of key {key}: {e}")
+            return False
+
+    async def ttl(self, key: str) -> int:
+        """
+        Get TTL of key in seconds
+
+        Args:
+            key: Cache key
+
+        Returns:
+            int: TTL in seconds, -2 if key doesn't exist, -1 if key has no TTL
+        """
+        try:
+            return await self.redis.ttl(key)
+        except Exception as e:
+            logger.error(f"Error getting TTL for key {key}: {e}")
+            return -2
+
+    async def incr(self, key: str, amount: int = 1) -> Optional[int]:
+        """
+        Increment value by amount
+
+        Args:
+            key: Cache key
+            amount: Amount to increment by
+
+        Returns:
+            int: New value after increment or None on error
+        """
+        try:
+            return await self.redis.incrby(key, amount)
+        except Exception as e:
+            logger.error(f"Error incrementing key {key}: {e}")
+            return None
+
+    async def set_nx(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
+        """
+        Set key only if it doesn't exist (SET NX operation)
+
+        Args:
+            key: Cache key
+            value: Value to set
+            ttl: Optional TTL in seconds
+
+        Returns:
+            bool: True if key was set, False otherwise
+        """
+        try:
+            serialized = pickle.dumps(value)
+            if ttl is not None:
+                return await self.redis.set(key, serialized, nx=True, ex=ttl)
+            return await self.redis.set(key, serialized, nx=True)
+        except Exception as e:
+            logger.error(f"Error setting NX for key {key}: {e}")
+            return False
+
+    async def clear(self) -> bool:
+        """
+        Clear all keys from the current database
+
+        Returns:
+            bool: True if successful, False otherwise
+        """
+        try:
+            await self.redis.flushdb()
+            return True
+        except Exception as e:
+            logger.error(f"Error clearing cache: {e}")
+            return False
+
+    async def close(self) -> None:
+        """Close Redis connection"""
+        await self.redis.close()
+
+    async def ping(self) -> bool:
+        """
+        Check Redis connection
+
+        Returns:
+            bool: True if connection is alive, False otherwise
+        """
+        try:
+            return await self.redis.ping()
+        except Exception:
+            return False
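
A short round-trip with the new backend; a sketch assuming a local Redis and the top-level export from the cache `__init__` earlier in this diff:

```python
import asyncio

from hypern.cache import RedisBackend  # import path assumed from the __init__ above


async def main() -> None:
    cache = RedisBackend(url="redis://localhost:6379")
    if not await cache.ping():
        raise SystemExit("Redis is not reachable")

    # Values go through pickle, so arbitrary Python objects round-trip.
    await cache.set("user:1", {"name": "Ada"}, ttl=60)
    print(await cache.get("user:1"))   # {'name': 'Ada'}
    print(await cache.ttl("user:1"))   # seconds remaining; -1/-2 per Redis semantics
    await cache.delete_pattern("user:*")
    await cache.close()


asyncio.run(main())
```

Note that every operation swallows exceptions and logs them, so callers should treat `None`/`False`/`0` returns as soft failures rather than relying on exceptions.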
@@ -0,0 +1,208 @@
+import asyncio
+import time
+from abc import ABC, abstractmethod
+from typing import Callable, Generic, Optional, TypeVar
+
+import orjson
+
+from .backend import BaseBackend
+
+T = TypeVar("T")
+
+
+class CacheStrategy(ABC, Generic[T]):
+    """Base class for cache strategies"""
+
+    @abstractmethod
+    async def get(self, key: str) -> Optional[T]:
+        pass
+
+    @abstractmethod
+    async def set(self, key: str, value: T, ttl: Optional[int] = None) -> None:
+        pass
+
+    @abstractmethod
+    async def delete(self, key: str) -> None:
+        pass
+
+
+class CacheEntry(Generic[T]):
+    """Represents a cached item with metadata"""
+
+    def __init__(self, value: T, created_at: float, ttl: int, revalidate_after: Optional[int] = None):
+        self.value = value
+        self.created_at = created_at
+        self.ttl = ttl
+        self.revalidate_after = revalidate_after
+        self.is_revalidating = False
+
+    def is_stale(self) -> bool:
+        """Check if entry is stale and needs revalidation"""
+        now = time.time()
+        return self.revalidate_after is not None and now > (self.created_at + self.revalidate_after)
+
+    def is_expired(self) -> bool:
+        """Check if entry has completely expired"""
+        now = time.time()
+        return now > (self.created_at + self.ttl)
+
+    def to_json(self) -> bytes:
+        """Serialize entry to JSON"""
+        return orjson.dumps(
+            {
+                "value": self.value,
+                "created_at": self.created_at,
+                "ttl": self.ttl,
+                "revalidate_after": self.revalidate_after,
+                "is_revalidating": self.is_revalidating,
+            }
+        )
+
+    @classmethod
+    def from_json(cls, data: bytes) -> "CacheEntry[T]":
+        """Deserialize entry from JSON"""
+        parsed = orjson.loads(data)
+        return cls(value=parsed["value"], created_at=parsed["created_at"], ttl=parsed["ttl"], revalidate_after=parsed["revalidate_after"])
+
+
+class StaleWhileRevalidateStrategy(CacheStrategy[T]):
+    """
+    Implements stale-while-revalidate caching strategy.
+    Allows serving stale content while revalidating in the background.
+    """
+
+    def __init__(self, backend: BaseBackend, revalidate_after: int, ttl: int, revalidate_fn: Callable[..., T]):
+        """
+        Initialize the caching strategy.
+
+        Args:
+            backend (BaseBackend): The backend storage for caching.
+            revalidate_after (int): The time in seconds after which the cache should be revalidated.
+            ttl (int): The time-to-live for cache entries in seconds.
+            revalidate_fn (Callable[..., T]): The function to call for revalidating the cache.
+
+        Attributes:
+            backend (BaseBackend): The backend storage for caching.
+            revalidate_after (int): The time in seconds after which the cache should be revalidated.
+            ttl (int): The time-to-live for cache entries in seconds.
+            revalidate_fn (Callable[..., T]): The function to call for revalidating the cache.
+            _revalidation_locks (dict): A dictionary to manage revalidation locks.
+        """
+        self.backend = backend
+        self.revalidate_after = revalidate_after
+        self.ttl = ttl
+        self.revalidate_fn = revalidate_fn
+        self._revalidation_locks: dict = {}
+
+    async def get(self, key: str) -> Optional[T]:
+        entry = await self.backend.get(key)
+        if not entry:
+            return None
+
+        if isinstance(entry, bytes):
+            entry = CacheEntry.from_json(entry)
+
+        # If entry is stale but not expired, trigger background revalidation
+        if entry.is_stale() and not entry.is_expired():
+            if not entry.is_revalidating:
+                entry.is_revalidating = True
+                await self.backend.set(key, entry.to_json())
+                asyncio.create_task(self._revalidate(key))
+            return entry.value
+
+        # If entry is expired, return None
+        if entry.is_expired():
+            return None
+
+        return entry.value
+
+    async def set(self, key: str, value: T, ttl: Optional[int] = None) -> None:
+        entry = CacheEntry(value=value, created_at=time.time(), ttl=ttl or self.ttl, revalidate_after=self.revalidate_after)
+        await self.backend.set(key, entry.to_json(), ttl=ttl)
+
+    async def delete(self, key: str) -> None:
+        await self.backend.delete(key)
+
+    async def _revalidate(self, key: str) -> None:
+        """Background revalidation of cached data"""
+        try:
+            # Prevent multiple simultaneous revalidations
+            if key in self._revalidation_locks:
+                return
+            self._revalidation_locks[key] = True
+
+            # Get fresh data
+            fresh_value = await self.revalidate_fn(key)
+
+            # Update cache with fresh data
+            await self.set(key, fresh_value)
+        finally:
+            self._revalidation_locks.pop(key, None)
+
+
+class CacheAsideStrategy(CacheStrategy[T]):
+    """
+    Implements cache-aside (lazy loading) strategy.
+    Data is loaded into cache only when requested.
+    """
+
+    def __init__(self, backend: BaseBackend, load_fn: Callable[[str], T], ttl: int, write_through: bool = False):
+        self.backend = backend
+        self.load_fn = load_fn
+        self.ttl = ttl
+        self.write_through = write_through
+
+    async def get(self, key: str) -> Optional[T]:
+        # Try to get from cache first
+        value = await self.backend.get(key)
+        if value:
+            if isinstance(value, bytes):
+                value = orjson.loads(value)
+            return value
+
+        # On cache miss, load from source
+        value = await self.load_fn(key)
+        if value is not None:
+            await self.set(key, value)
+        return value
+
+    async def set(self, key: str, value: T, ttl: Optional[int] = None) -> None:
+        await self.backend.set(key, value, ttl or self.ttl)
+
+        # If write-through is enabled, update the source
+        if self.write_through:
+            await self._write_to_source(key, value)
+
+    async def delete(self, key: str) -> None:
+        await self.backend.delete(key)
+
+    async def _write_to_source(self, key: str, value: T) -> None:
+        """Write to the source in write-through mode"""
+        if hasattr(self.load_fn, "write"):
+            await self.load_fn.write(key, value)
+
+
+def cache_with_strategy(strategy: CacheStrategy, key_prefix: str | None = None, ttl: int = 3600):
+    """
+    Decorator for using cache strategies
+    """
+
+    def decorator(func):
+        async def wrapper(*args, **kwargs):
+            # Generate cache key
+            cache_key = f"{key_prefix or func.__name__}:{hash(str(args) + str(kwargs))}"
+
+            result = await strategy.get(cache_key)
+            if result is not None:
+                return result
+
+            # Execute function and cache result
+            result = await func(*args, **kwargs)
+            if result is not None:
+                await strategy.set(cache_key, result, ttl)
+
+            return result
+
+        return wrapper
+
+    return decorator
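
End to end, the decorator composes with either strategy. A sketch wiring `CacheAsideStrategy` to the Redis backend (import path assumed from the cache `__init__` above; the `load_fn` here deliberately returns None so the decorated function is the only data source):

```python
import asyncio

from hypern.cache import CacheAsideStrategy, RedisBackend, cache_with_strategy


async def load_fn(key: str):
    # No external source: return None so misses fall through to the function.
    return None


strategy = CacheAsideStrategy(RedisBackend(), load_fn=load_fn, ttl=300)


@cache_with_strategy(strategy, key_prefix="user", ttl=300)
async def get_user(user_id: int) -> dict:
    print("computing...")                    # printed on the first call only
    return {"id": user_id, "name": "Ada"}


async def main() -> None:
    await get_user(1)  # miss: executes the function and caches under "user:<hash>"
    await get_user(1)  # hit: served from Redis within the TTL


asyncio.run(main())
```

One caveat worth knowing: the key uses `hash(str(args) + str(kwargs))`, and Python's string hashing is randomized per process unless `PYTHONHASHSEED` is pinned, so cache keys are not stable across processes or restarts.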
@@ -0,0 +1,6 @@
+from .gateway import APIGateway
+from .aggregator import Aggregator
+from .proxy import Proxy
+from .service import ServiceConfig, ServiceRegistry
+
+__all__ = ["APIGateway", "Aggregator", "Proxy", "ServiceConfig", "ServiceRegistry"]
@@ -0,0 +1,32 @@
+import asyncio
+from typing import Any, Dict, List
+
+from hypern.response import JSONResponse
+
+from .proxy import Proxy
+from .service import ServiceRegistry
+
+
+class Aggregator:
+    def __init__(self, registry: ServiceRegistry, proxy: Proxy):
+        self._registry = registry
+        self._proxy = proxy
+
+    async def aggregate_responses(self, requests: List[Dict[str, Any]]) -> JSONResponse:
+        tasks = []
+        for req in requests:
+            service = self._registry.get_service(req["service"])
+            if service:
+                tasks.append(self._proxy.forward_request(service, req["request"]))
+
+        responses = await asyncio.gather(*tasks, return_exceptions=True)
+
+        aggregated = {}
+        for i, response in enumerate(responses):
+            service_name = requests[i]["service"]
+            if isinstance(response, Exception):
+                aggregated[service_name] = {"status": "error", "error": str(response)}
+            else:
+                aggregated[service_name] = {"status": "success", "data": response.body}
+
+        return JSONResponse(content=aggregated)
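The aggregator fans requests out concurrently and keys results by service name. A runnable sketch with stub registry/proxy (the real `ServiceRegistry` and `Proxy` live in `.service`/`.proxy`, outside this diff; the import path is assumed from the gateway `__init__` above):

```python
import asyncio
from types import SimpleNamespace
from typing import Any

from hypern.gateway import Aggregator  # import path assumed


class StubRegistry:
    def get_service(self, name: str):
        return name  # any truthy handle counts as "registered"


class StubProxy:
    async def forward_request(self, service: Any, request: Any):
        if service == "orders":
            raise RuntimeError("orders service down")  # captured by gather()
        return SimpleNamespace(body={"service": service})  # aggregator reads .body


async def main() -> None:
    aggregator = Aggregator(StubRegistry(), StubProxy())
    response = await aggregator.aggregate_responses(
        [
            {"service": "users", "request": {}},
            {"service": "orders", "request": {}},
        ]
    )
    # The JSONResponse payload maps each service name to either
    # {"status": "success", "data": ...} or {"status": "error", "error": ...}.


asyncio.run(main())
```

A caveat visible in the code: `responses` is index-aligned with `requests`, which only holds when every named service resolves in the registry; an unregistered service shifts the alignment.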
@@ -0,0 +1,41 @@
+from typing import Any, Dict, List, Optional
+
+from hypern import Hypern
+from hypern.hypern import Request
+from hypern.response import JSONResponse
+
+from .aggregator import Aggregator
+from .proxy import Proxy
+from .service import ServiceConfig, ServiceRegistry
+
+
+class APIGateway:
+    def __init__(self, app: Hypern):
+        self.app = app
+        self.registry = ServiceRegistry()
+        self.proxy = Proxy(self.registry)
+        self.aggregator = Aggregator(self.registry, self.proxy)
+
+    def register_service(self, config: ServiceConfig, metadata: Optional[Dict[str, Any]] = None):
+        """Register a new service with the gateway"""
+        self.registry.register(config, metadata)
+
+    async def startup(self):
+        """Initialize the gateway components"""
+        await self.proxy.startup()
+
+    async def shutdown(self):
+        """Cleanup gateway resources"""
+        await self.proxy.shutdown()
+
+    async def handle_request(self, request: Request) -> Any:
+        """Main request handler"""
+        service = self.registry.get_service_by_prefix(request.path)
+        if not service:
+            return JSONResponse(content={"error": "Service not found"}, status_code=404)
+
+        return await self.proxy.forward_request(service, request)
+
+    async def aggregate(self, requests: List[Dict[str, Any]]) -> Any:
+        """Handle aggregated requests"""
+        return await self.aggregator.aggregate_responses(requests)
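
Finally, a wiring sketch tying the gateway to the lifecycle hooks added in application.py above. `ServiceConfig` is defined in `.service` (not shown in this diff), so its keyword arguments here are assumptions for illustration only:

```python
from hypern import Hypern
from hypern.gateway import APIGateway, ServiceConfig  # import path assumed

app = Hypern()
gateway = APIGateway(app)

# ServiceConfig fields are assumed; see .service in the package for the real shape.
gateway.register_service(ServiceConfig(name="users", url="http://users:8000", prefix="/users"))

# Tie proxy setup/teardown to the new on_startup/on_shutdown hooks.
app.on_startup(gateway.startup)
app.on_shutdown(gateway.shutdown)

# Routing every matching path into gateway.handle_request is left as pseudocode:
# the route-pattern syntax belongs to hypern's router, not to this diff.
# app.add_route(HTTPMethod.GET, "/users/<path>", gateway.handle_request)

app.start()
```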