hypern 0.3.11__cp310-cp310-musllinux_1_2_i686.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74) hide show
  1. hypern/__init__.py +24 -0
  2. hypern/application.py +495 -0
  3. hypern/args_parser.py +73 -0
  4. hypern/auth/__init__.py +0 -0
  5. hypern/auth/authorization.py +2 -0
  6. hypern/background.py +4 -0
  7. hypern/caching/__init__.py +6 -0
  8. hypern/caching/backend.py +31 -0
  9. hypern/caching/redis_backend.py +201 -0
  10. hypern/caching/strategies.py +208 -0
  11. hypern/cli/__init__.py +0 -0
  12. hypern/cli/commands.py +0 -0
  13. hypern/config.py +246 -0
  14. hypern/database/__init__.py +0 -0
  15. hypern/database/sqlalchemy/__init__.py +4 -0
  16. hypern/database/sqlalchemy/config.py +66 -0
  17. hypern/database/sqlalchemy/repository.py +290 -0
  18. hypern/database/sqlx/__init__.py +36 -0
  19. hypern/database/sqlx/field.py +246 -0
  20. hypern/database/sqlx/migrate.py +263 -0
  21. hypern/database/sqlx/model.py +117 -0
  22. hypern/database/sqlx/query.py +904 -0
  23. hypern/datastructures.py +40 -0
  24. hypern/enum.py +13 -0
  25. hypern/exceptions/__init__.py +34 -0
  26. hypern/exceptions/base.py +62 -0
  27. hypern/exceptions/common.py +12 -0
  28. hypern/exceptions/errors.py +15 -0
  29. hypern/exceptions/formatters.py +56 -0
  30. hypern/exceptions/http.py +76 -0
  31. hypern/gateway/__init__.py +6 -0
  32. hypern/gateway/aggregator.py +32 -0
  33. hypern/gateway/gateway.py +41 -0
  34. hypern/gateway/proxy.py +60 -0
  35. hypern/gateway/service.py +52 -0
  36. hypern/hypern.cpython-310-i386-linux-gnu.so +0 -0
  37. hypern/hypern.pyi +333 -0
  38. hypern/i18n/__init__.py +0 -0
  39. hypern/logging/__init__.py +3 -0
  40. hypern/logging/logger.py +82 -0
  41. hypern/middleware/__init__.py +17 -0
  42. hypern/middleware/base.py +13 -0
  43. hypern/middleware/cache.py +177 -0
  44. hypern/middleware/compress.py +78 -0
  45. hypern/middleware/cors.py +41 -0
  46. hypern/middleware/i18n.py +1 -0
  47. hypern/middleware/limit.py +177 -0
  48. hypern/middleware/security.py +184 -0
  49. hypern/openapi/__init__.py +5 -0
  50. hypern/openapi/schemas.py +51 -0
  51. hypern/openapi/swagger.py +3 -0
  52. hypern/processpool.py +139 -0
  53. hypern/py.typed +0 -0
  54. hypern/reload.py +46 -0
  55. hypern/response/__init__.py +3 -0
  56. hypern/response/response.py +142 -0
  57. hypern/routing/__init__.py +5 -0
  58. hypern/routing/dispatcher.py +70 -0
  59. hypern/routing/endpoint.py +30 -0
  60. hypern/routing/parser.py +98 -0
  61. hypern/routing/queue.py +175 -0
  62. hypern/routing/route.py +280 -0
  63. hypern/scheduler.py +5 -0
  64. hypern/worker.py +274 -0
  65. hypern/ws/__init__.py +4 -0
  66. hypern/ws/channel.py +80 -0
  67. hypern/ws/heartbeat.py +74 -0
  68. hypern/ws/room.py +76 -0
  69. hypern/ws/route.py +26 -0
  70. hypern-0.3.11.dist-info/METADATA +134 -0
  71. hypern-0.3.11.dist-info/RECORD +74 -0
  72. hypern-0.3.11.dist-info/WHEEL +4 -0
  73. hypern-0.3.11.dist-info/licenses/LICENSE +24 -0
  74. hypern.libs/libgcc_s-b5472b99.so.1 +0 -0
@@ -0,0 +1,201 @@
1
+ # src/hypern/cache/backends/redis.py
2
+ import pickle
3
+ from typing import Any, Optional
4
+
5
+ from redis import asyncio as aioredis
6
+
7
+ from hypern.logging import logger
8
+
9
+ from .backend import BaseBackend
10
+
11
+
12
class RedisBackend(BaseBackend):
    """Async Redis cache backend built on redis-py's asyncio client.

    Values are serialized with :mod:`pickle`, so arbitrary Python objects can
    be cached.  NOTE(review): unpickling is unsafe on untrusted data -- only
    point this backend at a Redis instance you control.
    """

    def __init__(self, url: str = "redis://localhost:6379", encoding: str = "utf-8", decode_responses: bool = False, **kwargs):
        """
        Initialize Redis backend with aioredis.

        Args:
            url: Redis connection URL
            encoding: Character encoding to use
            decode_responses: Whether to decode response bytes to strings.
                Must stay False for pickled payloads to round-trip correctly.
            **kwargs: Additional arguments passed to aioredis.from_url
        """
        self.redis = aioredis.from_url(url, encoding=encoding, decode_responses=decode_responses, **kwargs)
        self._encoding = encoding

    async def get(self, key: str) -> Optional[Any]:
        """
        Get value from Redis.

        Args:
            key: Cache key

        Returns:
            Deserialized Python object, or None if the key doesn't exist
            (or on any Redis/deserialization error, which is logged).
        """
        try:
            value = await self.redis.get(key)
            if value is not None:
                return pickle.loads(value)
            return None
        except Exception as e:
            logger.error(f"Error getting cache key {key}: {e}")
            return None

    async def set(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
        """
        Set value in Redis with optional TTL.

        Args:
            key: Cache key
            value: Python object to store
            ttl: Time to live in seconds; None means no expiry

        Returns:
            bool: True if successful, False otherwise
        """
        try:
            serialized = pickle.dumps(value)
            if ttl is not None:
                await self.redis.setex(key, ttl, serialized)
            else:
                await self.redis.set(key, serialized)
            return True
        except Exception as e:
            logger.error(f"Error setting cache key {key}: {e}")
            return False

    async def delete(self, key: str) -> bool:
        """
        Delete key from Redis.

        Args:
            key: Cache key to delete

        Returns:
            bool: True if the key was deleted, False otherwise
        """
        try:
            # DEL returns the number of removed keys (0 or 1 here).
            return bool(await self.redis.delete(key))
        except Exception as e:
            logger.error(f"Error deleting cache key {key}: {e}")
            return False

    async def delete_pattern(self, pattern: str) -> int:
        """
        Delete all keys matching pattern.

        NOTE(review): KEYS is O(N) and blocks the Redis server while it scans;
        for large databases consider switching to ``scan_iter``.

        Args:
            pattern: Redis key pattern to match (glob-style)

        Returns:
            int: Number of keys deleted
        """
        try:
            keys = await self.redis.keys(pattern)
            if keys:
                return await self.redis.delete(*keys)
            return 0
        except Exception as e:
            logger.error(f"Error deleting keys matching {pattern}: {e}")
            return 0

    async def exists(self, key: str) -> bool:
        """
        Check if key exists.

        Args:
            key: Cache key to check

        Returns:
            bool: True if key exists, False otherwise
        """
        try:
            # EXISTS returns an integer count; coerce to bool.
            return bool(await self.redis.exists(key))
        except Exception as e:
            logger.error(f"Error checking existence of key {key}: {e}")
            return False

    async def ttl(self, key: str) -> int:
        """
        Get TTL of key in seconds.

        Args:
            key: Cache key

        Returns:
            int: TTL in seconds, -2 if key doesn't exist, -1 if key has no TTL
        """
        try:
            return await self.redis.ttl(key)
        except Exception as e:
            logger.error(f"Error getting TTL for key {key}: {e}")
            # Mirror Redis's "key does not exist" sentinel on error.
            return -2

    async def incr(self, key: str, amount: int = 1) -> Optional[int]:
        """
        Increment value by amount.

        Args:
            key: Cache key
            amount: Amount to increment by

        Returns:
            int: New value after increment, or None on error
        """
        try:
            return await self.redis.incrby(key, amount)
        except Exception as e:
            logger.error(f"Error incrementing key {key}: {e}")
            return None

    async def set_nx(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
        """
        Set key only if it doesn't exist (SET NX operation).

        Args:
            key: Cache key
            value: Value to set
            ttl: Optional TTL in seconds

        Returns:
            bool: True if key was set, False otherwise
        """
        try:
            serialized = pickle.dumps(value)
            # Fix: redis-py returns True on success but *None* (not False)
            # when the key already exists; coerce so the documented bool
            # contract holds.
            if ttl is not None:
                return bool(await self.redis.set(key, serialized, nx=True, ex=ttl))
            return bool(await self.redis.set(key, serialized, nx=True))
        except Exception as e:
            logger.error(f"Error setting NX for key {key}: {e}")
            return False

    async def clear(self) -> bool:
        """
        Clear all keys from the current database (FLUSHDB).

        Returns:
            bool: True if successful, False otherwise
        """
        try:
            await self.redis.flushdb()
            return True
        except Exception as e:
            logger.error(f"Error clearing cache: {e}")
            return False

    async def close(self) -> None:
        """Close Redis connection."""
        await self.redis.close()

    async def ping(self) -> bool:
        """
        Check Redis connection.

        Returns:
            bool: True if connection is alive, False otherwise
        """
        try:
            # PING replies may not be a strict bool depending on client
            # options; normalize to the documented return type.
            return bool(await self.redis.ping())
        except Exception:
            return False
@@ -0,0 +1,208 @@
1
+ import asyncio
2
+ import time
3
+ from abc import ABC, abstractmethod
4
+ from typing import Callable, Generic, Optional, TypeVar
5
+
6
+ import orjson
7
+
8
+ from .backend import BaseBackend
9
+
10
T = TypeVar("T")


class CacheStrategy(ABC, Generic[T]):
    """Abstract interface that all cache strategies implement.

    Concrete strategies provide async ``get``/``set``/``delete`` operations
    over string keys; ``T`` is the type of the cached values.
    """

    @abstractmethod
    async def get(self, key: str) -> Optional[T]:
        """Return the cached value for *key*, or None on a miss."""
        ...

    @abstractmethod
    async def set(self, key: str, value: T, ttl: Optional[int] = None) -> None:
        """Store *value* under *key*, optionally expiring after *ttl* seconds."""
        ...

    @abstractmethod
    async def delete(self, key: str) -> None:
        """Remove *key* from the cache."""
        ...
27
+
28
+
29
class CacheEntry(Generic[T]):
    """Represents a cached item with expiry/revalidation metadata."""

    def __init__(self, value: T, created_at: float, ttl: int, revalidate_after: Optional[int] = None):
        self.value = value
        # Unix timestamp (seconds) when the entry was created.
        self.created_at = created_at
        # Hard expiry, seconds after created_at.
        self.ttl = ttl
        # Soft expiry: after this many seconds the entry is "stale" and
        # should be refreshed in the background.  None disables staleness.
        self.revalidate_after = revalidate_after
        # True while a background revalidation is in flight for this entry.
        self.is_revalidating = False

    def is_stale(self) -> bool:
        """Check if entry is stale and needs revalidation."""
        now = time.time()
        return self.revalidate_after is not None and now > (self.created_at + self.revalidate_after)

    def is_expired(self) -> bool:
        """Check if entry has completely expired."""
        now = time.time()
        return now > (self.created_at + self.ttl)

    def to_json(self) -> bytes:
        """Serialize entry (value + metadata) to JSON bytes."""
        return orjson.dumps(
            {
                "value": self.value,
                "created_at": self.created_at,
                "ttl": self.ttl,
                "revalidate_after": self.revalidate_after,
                "is_revalidating": self.is_revalidating,
            }
        )

    @classmethod
    def from_json(cls, data: bytes) -> "CacheEntry[T]":
        """Deserialize entry from JSON bytes.

        Fix: restore ``is_revalidating`` as well -- it was serialized by
        :meth:`to_json` but previously dropped here, so every concurrent
        reader of a stale entry would spawn its own revalidation task.
        ``.get`` keeps backward compatibility with entries written before
        the flag was persisted.
        """
        parsed = orjson.loads(data)
        entry = cls(
            value=parsed["value"],
            created_at=parsed["created_at"],
            ttl=parsed["ttl"],
            revalidate_after=parsed["revalidate_after"],
        )
        entry.is_revalidating = parsed.get("is_revalidating", False)
        return entry
66
+
67
+
68
class StaleWhileRevalidateStrategy(CacheStrategy[T]):
    """
    Implements the stale-while-revalidate caching strategy.

    Stale (but not yet expired) entries are served immediately while a fresh
    value is fetched in the background; expired entries are treated as misses.
    """

    def __init__(self, backend: BaseBackend, revalidate_after: int, ttl: int, revalidate_fn: Callable[..., T]):
        """
        Initialize the caching strategy.

        Args:
            backend (BaseBackend): The backend storage for caching.
            revalidate_after (int): Seconds after which a cached entry is
                considered stale and triggers background revalidation.
            ttl (int): Default time-to-live for cache entries in seconds.
            revalidate_fn (Callable[..., T]): Async callable invoked with the
                cache key to fetch fresh data.
        """
        self.backend = backend
        self.revalidate_after = revalidate_after
        self.ttl = ttl
        self.revalidate_fn = revalidate_fn
        # Per-key guard so only one revalidation runs at a time.
        self._revalidation_locks: dict = {}
        # Strong references to in-flight tasks: the event loop only keeps
        # weak references, so an unreferenced task could be GC'd mid-run.
        self._background_tasks: set = set()

    async def get(self, key: str) -> Optional[T]:
        entry = await self.backend.get(key)
        if not entry:
            return None

        # Backends that return raw bytes need deserialization first.
        if isinstance(entry, bytes):
            entry = CacheEntry.from_json(entry)

        # Stale but not expired: serve the stale value and refresh it in the
        # background (at most one refresh per key at a time).
        if entry.is_stale() and not entry.is_expired():
            if not entry.is_revalidating:
                entry.is_revalidating = True
                await self.backend.set(key, entry.to_json())
                task = asyncio.create_task(self._revalidate(key))
                self._background_tasks.add(task)
                task.add_done_callback(self._background_tasks.discard)
            return entry.value

        # Fully expired entries are cache misses.
        if entry.is_expired():
            return None

        return entry.value

    async def set(self, key: str, value: T, ttl: Optional[int] = None) -> None:
        effective_ttl = ttl or self.ttl
        entry = CacheEntry(value=value, created_at=time.time(), ttl=effective_ttl, revalidate_after=self.revalidate_after)
        # Fix: pass the *effective* TTL to the backend.  Previously the raw
        # ``ttl`` (usually None) was forwarded, so keys never expired in the
        # backend even though the entry's logical ttl was ``self.ttl``.
        await self.backend.set(key, entry.to_json(), ttl=effective_ttl)

    async def delete(self, key: str) -> None:
        await self.backend.delete(key)

    async def _revalidate(self, key: str) -> None:
        """Background revalidation of cached data."""
        try:
            # Prevent multiple simultaneous revalidations for the same key.
            if key in self._revalidation_locks:
                return
            self._revalidation_locks[key] = True

            # Fetch fresh data and overwrite the stale entry.
            fresh_value = await self.revalidate_fn(key)
            await self.set(key, fresh_value)
        finally:
            self._revalidation_locks.pop(key, None)
141
+
142
+
143
class CacheAsideStrategy(CacheStrategy[T]):
    """
    Implements the cache-aside (lazy loading) strategy.

    Data is loaded into the cache only when requested; with
    ``write_through=True`` writes are also propagated to the source.
    """

    def __init__(self, backend: BaseBackend, load_fn: Callable[[str], T], ttl: int, write_through: bool = False):
        """
        Args:
            backend: Cache storage backend.
            load_fn: Async callable that loads the value for a key from the
                source of truth on a cache miss.
            ttl: Default time-to-live for cached entries in seconds.
            write_through: When True, ``set`` also writes to the source via
                ``load_fn.write`` if that attribute exists.
        """
        self.backend = backend
        self.load_fn = load_fn
        self.ttl = ttl
        self.write_through = write_through

    async def get(self, key: str) -> Optional[T]:
        # Try the cache first.  Fix: compare against None instead of relying
        # on truthiness, so cached falsy values (0, "", False, []) count as
        # hits instead of forcing a reload from the source on every call.
        value = await self.backend.get(key)
        if value is not None:
            if isinstance(value, bytes):
                value = orjson.loads(value)
            return value

        # Cache miss: load from the source and populate the cache.
        value = await self.load_fn(key)
        if value is not None:
            await self.set(key, value)
        return value

    async def set(self, key: str, value: T, ttl: Optional[int] = None) -> None:
        await self.backend.set(key, value, ttl or self.ttl)

        # If write-through is enabled, update the source as well.
        if self.write_through:
            await self._write_to_source(key, value)

    async def delete(self, key: str) -> None:
        await self.backend.delete(key)

    async def _write_to_source(self, key: str, value: T) -> None:
        """Write to the source in write-through mode (best effort: only when
        the loader exposes a ``write`` coroutine)."""
        if hasattr(self.load_fn, "write"):
            await self.load_fn.write(key, value)
183
+
184
+
185
def cache_with_strategy(strategy: CacheStrategy, key_prefix: str | None = None, ttl: int = 3600):
    """
    Decorator that caches an async function's result via a cache strategy.

    Args:
        strategy: Strategy used to read/write cached values.
        key_prefix: Prefix for generated cache keys (defaults to the wrapped
            function's name).
        ttl: Time-to-live passed to the strategy on writes, in seconds.

    Note:
        ``None`` results are never cached, so a function returning None is
        re-executed on every call.
    """
    # Local imports keep the decorator self-contained.
    import functools
    import hashlib

    def decorator(func):
        @functools.wraps(func)  # preserve the wrapped function's metadata
        async def wrapper(*args, **kwargs):
            # Fix: builtin hash() is randomized per process (PYTHONHASHSEED),
            # so hash-based keys differ across workers/restarts and defeat a
            # shared cache backend.  Use a stable digest of the arguments.
            arg_digest = hashlib.sha256(f"{args!r}{kwargs!r}".encode()).hexdigest()
            cache_key = f"{key_prefix or func.__name__}:{arg_digest}"

            result = await strategy.get(cache_key)
            if result is not None:
                return result

            # Cache miss: execute the function and cache a non-None result.
            result = await func(*args, **kwargs)
            if result is not None:
                await strategy.set(cache_key, result, ttl)

            return result

        return wrapper

    return decorator
hypern/cli/__init__.py ADDED
File without changes
hypern/cli/commands.py ADDED
File without changes
hypern/config.py ADDED
@@ -0,0 +1,246 @@
1
+ from __future__ import annotations
2
+
3
+ import os
4
+
5
+ # -*- coding: utf-8 -*-
6
+ import threading
7
+ import typing
8
+ import warnings
9
+ from contextvars import ContextVar
10
+ from datetime import datetime
11
+ from pathlib import Path
12
+ from typing import Dict, Optional
13
+
14
+ """
15
+
16
+ refer: https://github.com/encode/starlette/blob/master/starlette/config.py
17
+ # Config will be read from environment variables and/or ".env" files.
18
+ config = Config(".env")
19
+
20
+ DEBUG = config('DEBUG', cast=bool, default=False)
21
+ DATABASE_URL = config('DATABASE_URL')
22
+ ALLOWED_HOSTS = config('ALLOWED_HOSTS', cast=CommaSeparatedStrings)
23
+ """
24
+
25
+
26
class undefined:
    """Sentinel type used to distinguish "no default supplied" from None."""
28
+
29
+
30
class EnvironError(Exception):
    """Raised when an environment variable is mutated after it was read."""
32
+
33
+
34
class Environ(typing.MutableMapping[str, str]):
    """Mutable mapping over the process environment that forbids changing a
    variable after its value has already been read (read-then-write would
    silently desynchronize configuration)."""

    def __init__(self, environ: typing.MutableMapping[str, str] = os.environ):
        self._environ = environ
        self._has_been_read: set[str] = set()

    def __getitem__(self, key: str) -> str:
        # Record the read so later writes/deletes of this key are rejected.
        self._has_been_read.add(key)
        return self._environ[key]

    def __setitem__(self, key: str, value: str) -> None:
        if key in self._has_been_read:
            raise EnvironError(f"Attempting to set environ['{key}'], but the value has already been read.")
        self._environ[key] = value

    def __delitem__(self, key: str) -> None:
        if key in self._has_been_read:
            raise EnvironError(f"Attempting to delete environ['{key}'], but the value has already been read.")
        del self._environ[key]

    def __iter__(self) -> typing.Iterator[str]:
        return iter(self._environ)

    def __len__(self) -> int:
        return len(self._environ)
58
+
59
+
60
# Module-level Environ singleton: the default environment source for Config.
environ = Environ()

# Generic type variable for the typed Config.__call__/get overloads below.
T = typing.TypeVar("T")
63
+
64
+
65
class Config:
    """Reads configuration from environment variables and/or a ``.env`` file.

    Lookup order for a key: process environment, then the env file, then the
    supplied default.  An optional ``cast`` callable converts the raw string
    value (with special handling for ``bool``).
    """

    def __init__(
        self,
        env_file: str | Path | None = None,
        environ: typing.Mapping[str, str] = environ,
        env_prefix: str = "",
    ) -> None:
        self.environ = environ
        self.env_prefix = env_prefix
        self.file_values: dict[str, str] = {}
        if env_file is not None:
            if not os.path.isfile(env_file):
                # A missing file is a warning, not an error: env vars may
                # still provide every value.
                warnings.warn(f"Config file '{env_file}' not found.")
            else:
                self.file_values = self._read_file(env_file)

    @typing.overload
    def __call__(self, key: str, *, default: None) -> str | None: ...

    @typing.overload
    def __call__(self, key: str, cast: type[T], default: T = ...) -> T: ...

    @typing.overload
    def __call__(self, key: str, cast: type[str] = ..., default: str = ...) -> str: ...

    @typing.overload
    def __call__(
        self,
        key: str,
        cast: typing.Callable[[typing.Any], T] = ...,
        default: typing.Any = ...,
    ) -> T: ...

    @typing.overload
    def __call__(self, key: str, cast: type[str] = ..., default: T = ...) -> T | str: ...

    def __call__(
        self,
        key: str,
        cast: typing.Callable[[typing.Any], typing.Any] | None = None,
        default: typing.Any = undefined,
    ) -> typing.Any:
        return self.get(key, cast, default)

    def get(
        self,
        key: str,
        cast: typing.Callable[[typing.Any], typing.Any] | None = None,
        default: typing.Any = undefined,
    ) -> typing.Any:
        """Resolve *key* (with the configured prefix) from the environment,
        then the env file, then *default*; raise KeyError if absent."""
        key = self.env_prefix + key
        if key in self.environ:
            value = self.environ[key]
            return self._perform_cast(key, value, cast)
        if key in self.file_values:
            value = self.file_values[key]
            return self._perform_cast(key, value, cast)
        if default is not undefined:
            return self._perform_cast(key, default, cast)
        raise KeyError(f"Config '{key}' is missing, and has no default.")

    def _read_file(self, file_name: str | Path) -> dict[str, str]:
        """Parse ``KEY=VALUE`` lines from an env file.

        Lines starting with ``#`` and lines without ``=`` are skipped; values
        are stripped of surrounding quotes.
        """
        file_values: dict[str, str] = {}
        # Fix: read as UTF-8 explicitly so .env parsing does not depend on
        # the host locale's default encoding.
        with open(file_name, encoding="utf-8") as input_file:
            for line in input_file:
                line = line.strip()
                if "=" in line and not line.startswith("#"):
                    key, value = line.split("=", 1)
                    key = key.strip()
                    value = value.strip().strip("\"'")
                    file_values[key] = value
        return file_values

    def _perform_cast(
        self,
        key: str,
        value: typing.Any,
        cast: typing.Callable[[typing.Any], typing.Any] | None = None,
    ) -> typing.Any:
        """Apply *cast* to *value*, mapping common boolean spellings, and
        raise a descriptive ValueError on failure."""
        if cast is None or value is None:
            return value
        elif cast is bool and isinstance(value, str):
            # bool("false") would be True; interpret the usual spellings.
            mapping = {"true": True, "1": True, "false": False, "0": False}
            value = value.lower()
            if value not in mapping:
                raise ValueError(f"Config '{key}' has value '{value}'. Not a valid bool.")
            return mapping[value]
        try:
            return cast(value)
        except (TypeError, ValueError) as exc:
            # Fix: chain the original failure for easier debugging.
            raise ValueError(f"Config '{key}' has value '{value}'. Not a valid {cast.__name__}.") from exc
157
+
158
class ContextStore:
    """Tracks a per-execution-context session id with automatic expiry of
    stale session records via a background cleanup thread."""

    def __init__(self, cleanup_interval: int = 300, max_age: int = 3600):
        """
        Initialize ContextStore with automatic session cleanup.

        :param cleanup_interval: Interval between cleanup checks (in seconds)
        :param max_age: Maximum age of a session before it's considered expired (in seconds)
        """
        # session_id -> time set_context() was last called for it.
        self._session_times: Dict[str, datetime] = {}
        self.session_var: ContextVar[Optional[str]] = ContextVar("session_id", default=None)

        self._max_age = max_age
        self._cleanup_interval = cleanup_interval
        self._cleanup_thread: Optional[threading.Thread] = None
        self._stop_event = threading.Event()

        # Start the cleanup thread immediately.
        self._start_cleanup_thread()

    def _start_cleanup_thread(self):
        """Start a background thread for periodic session cleanup."""

        def cleanup_worker():
            while not self._stop_event.is_set():
                self._perform_cleanup()
                # wait() doubles as an interruptible sleep: it returns
                # immediately once stop_cleanup() sets the event.
                self._stop_event.wait(self._cleanup_interval)

        self._cleanup_thread = threading.Thread(
            target=cleanup_worker,
            daemon=True,  # auto-terminated when the main program exits
        )
        self._cleanup_thread.start()

    def _perform_cleanup(self):
        """Remove sessions whose last update is older than max_age."""
        current_time = datetime.now()
        # list() snapshot: the dict may be mutated concurrently by callers.
        expired_sessions = [
            session_id for session_id, timestamp in list(self._session_times.items()) if (current_time - timestamp).total_seconds() > self._max_age
        ]

        for session_id in expired_sessions:
            self.remove_session(session_id)

    def remove_session(self, session_id: str):
        """Remove a specific session's timestamp record."""
        self._session_times.pop(session_id, None)

    def set_context(self, session_id: str):
        """
        Bind *session_id* to the current execution context and record its
        timestamp for expiry tracking.

        :param session_id: Unique identifier for the session
        """
        self.session_var.set(session_id)
        self._session_times[session_id] = datetime.now()

    def get_context(self) -> Optional[str]:
        """
        Get the current session context.

        :return: Current session ID, or None when no session has been set
            in this execution context.
        """
        return self.session_var.get()

    def reset_context(self):
        """Clear the session context for the current execution context.

        Fix: the previous implementation passed the session *id* (a str) to
        ``ContextVar.reset()``, which requires the Token returned by
        ``set()`` and therefore always raised.  Setting the var back to its
        default (None) achieves the intended reset.
        """
        if self.session_var.get() is not None:
            self.session_var.set(None)

    def stop_cleanup(self):
        """
        Stop the cleanup thread.
        Useful for graceful shutdown of the application.
        """
        self._stop_event.set()
        if self._cleanup_thread:
            self._cleanup_thread.join()

    def __del__(self):
        """
        Best-effort stop of the cleanup thread when the object is deleted;
        never propagate exceptions out of a finalizer (interpreter shutdown
        may have already torn down threading internals).
        """
        try:
            self.stop_cleanup()
        except Exception:
            pass
244
+
245
+
246
# Process-wide singleton; NOTE: importing this module eagerly starts the
# store's background cleanup thread.
context_store = ContextStore()
File without changes
@@ -0,0 +1,4 @@
1
from .repository import Model, PostgresRepository
from .config import SqlalchemyConfig

# Public API of hypern.database.sqlalchemy.
__all__ = ["Model", "PostgresRepository", "SqlalchemyConfig"]