qena-shared-lib 0.1.18__py3-none-any.whl → 0.1.20__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,11 +1,22 @@
 try:
-    from . import kafka, rabbitmq, scheduler, security
+    from . import (
+        cache,
+        eventbus,
+        kafka,
+        mongodb,
+        rabbitmq,
+        redis,
+        scheduler,
+        security,
+        sync,
+    )
 except NameError:
     pass
 from . import (
     application,
     background,
     dependencies,
+    enums,
     exceptions,
     http,
     logging,
@@ -16,14 +27,20 @@ from . import (
 __all__ = [
     "application",
     "background",
+    "cache",
     "dependencies",
+    "enums",
+    "eventbus",
     "exceptions",
     "http",
     "kafka",
     "logging",
+    "mongodb",
     "rabbitmq",
+    "redis",
     "remotelogging",
     "scheduler",
     "security",
+    "sync",
     "utils",
 ]
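
The two hunks above widen the package's top-level namespace with the new cache, enums, eventbus, mongodb, redis, and sync submodules. A minimal sketch of the new import surface, assuming the wheel installs the qena_shared_lib package as in earlier releases:

    from qena_shared_lib import cache, enums, eventbus, redis
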
@@ -0,0 +1,27 @@
+from pydantic import BaseModel, ConfigDict
+from pydantic.alias_generators import to_camel, to_snake
+
+__all__ = [
+    "CamelCaseAliasedBaseModel",
+    "SnakeCaseAliasedBaseModel",
+]
+
+
+class CamelCaseAliasedBaseModel(BaseModel):
+    model_config = ConfigDict(
+        alias_generator=to_camel,
+        populate_by_name=True,
+        arbitrary_types_allowed=True,
+        extra="ignore",
+        strict=False,
+    )
+
+
+class SnakeCaseAliasedBaseModel(BaseModel):
+    model_config = ConfigDict(
+        alias_generator=to_snake,
+        populate_by_name=True,
+        arbitrary_types_allowed=True,
+        extra="ignore",
+        strict=False,
+    )
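
The new aliased base models configure pydantic's to_camel / to_snake alias generators with populate_by_name=True, so subclasses accept both the alias and the field name on input and ignore extra keys. A rough usage sketch, assuming the file lands as the alias submodule (the UserProfile model and its fields are made up for illustration):

    from qena_shared_lib.alias import CamelCaseAliasedBaseModel

    class UserProfile(CamelCaseAliasedBaseModel):
        user_id: int
        display_name: str

    # camelCase keys validate through the alias generator; snake_case keys
    # also work because populate_by_name=True.
    profile = UserProfile.model_validate({"userId": 1, "displayName": "Abebe"})

    # Dumping by alias restores the camelCase keys.
    assert profile.model_dump(by_alias=True) == {"userId": 1, "displayName": "Abebe"}
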
@@ -0,0 +1,61 @@
+from typing import Any, TypeVar, cast
+
+from redis.asyncio import Redis
+
+from .alias import CamelCaseAliasedBaseModel
+from .redis import RedisDependent
+
+__all__ = [
+    "CachedObject",
+    "CacheManager",
+]
+
+
+CO = TypeVar("CO", bound="CachedObject")
+
+
+class CachedObject(CamelCaseAliasedBaseModel):
+    @classmethod
+    def from_raw_value(
+        cls, obj: Any, *args: Any, **kwargs: Any
+    ) -> "CachedObject":
+        cache_object = cls.model_validate_json(json_data=obj, *args, **kwargs)
+
+        return cast(CachedObject, cache_object)
+
+    @classmethod
+    def redis_key(cls) -> str:
+        return cls.__name__
+
+
+class CacheManager(RedisDependent):
+    def attach(self, redis_client: Redis) -> None:
+        self._redis_client = redis_client
+
+    @property
+    def redis(self) -> Redis:
+        return self._redis_client
+
+    async def get(self, cached_object_type: type[CO]) -> CO | None:
+        cache_object = await self._redis_client.get(
+            cached_object_type.redis_key()
+        )
+
+        if cache_object is None:
+            return None
+
+        return cast(CO, cached_object_type.from_raw_value(obj=cache_object))
+
+    async def set(self, cache_object: CachedObject) -> None:
+        if not isinstance(cache_object, CachedObject):
+            raise TypeError(
+                f"object is not type of `CachedObject`, got `{cache_object.__class__.__name__}`"
+            )
+
+        await self._redis_client.set(
+            name=cache_object.redis_key(),
+            value=cache_object.model_dump_json(),
+        )
+
+    async def unset(self, cached_object_type: type[CO]) -> None:
+        await self._redis_client.delete(cached_object_type.redis_key())
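
The hunk above appears to be the new cache module re-exported from the package __init__: each CachedObject subclass is serialized as JSON and stored under a Redis key that defaults to the class name, while CacheManager wraps get/set/delete on an attached redis.asyncio client. A hedged sketch (the FeatureFlags model and the explicit attach() wiring are illustrative; in the library the client is presumably supplied through the RedisDependent integration):

    from redis.asyncio import Redis

    from qena_shared_lib.cache import CachedObject, CacheManager

    class FeatureFlags(CachedObject):
        beta_enabled: bool = False

    async def example(manager: CacheManager, client: Redis) -> None:
        manager.attach(client)

        await manager.set(FeatureFlags(beta_enabled=True))  # stored under the key "FeatureFlags"
        flags = await manager.get(FeatureFlags)             # parsed back via model_validate_json
        await manager.unset(FeatureFlags)                   # deletes the "FeatureFlags" key
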
@@ -0,0 +1,8 @@
+from enum import Enum
+
+__all__ = ["ServiceType"]
+
+
+class ServiceType(Enum):
+    HTTP = 0
+    RABBIT_MQ = 1
@@ -0,0 +1,373 @@
+from asyncio import Future, Queue, Task, wait_for
+from dataclasses import dataclass
+from typing import Any, AsyncGenerator, Callable, cast
+from uuid import UUID, uuid4
+
+from pika.adapters.asyncio_connection import AsyncioConnection
+from pika.channel import Channel
+from pika.exchange_type import ExchangeType
+from pika.frame import Method
+from pika.spec import Basic, BasicProperties
+from pydantic_core import to_json
+
+from qena_shared_lib.rabbitmq import (
+    AbstractRabbitMQService,
+    BaseChannel,
+    ChannelPool,
+)
+
+from .alias import CamelCaseAliasedBaseModel
+from .remotelogging import BaseRemoteLogSender
+from .utils import AsyncEventLoopMixin
+
+__all__ = [
+    "Event",
+    "EventBus",
+    "EventBusChannelAdapter",
+    "EventQueue",
+    "EventReciever",
+    "GlobalEvent",
+]
+
+
+@dataclass
+class Event:
+    payload: Any | None = None
+
+    def to_json(self) -> str:
+        return cast(str, to_json(self.payload).decode())
+
+
+EventQueue = Queue[Event]
+
+
+class GlobalEvent(CamelCaseAliasedBaseModel):
+    event_key: str
+    payload: Any | None = None
+
+
+class EventBusChannelAdapter(BaseChannel):
+    def __init__(
+        self,
+        connection: AsyncioConnection,
+        on_event_bus_channel_opened: Callable[[Channel], None],
+        on_event_bus_consumer_cancel: Callable[[], None],
+    ):
+        super().__init__(connection=connection, failed_reopen_threshold=None)
+
+        self._on_event_bus_channel_opened = on_event_bus_channel_opened
+        self._on_event_bus_consumer_cancel = on_event_bus_consumer_cancel
+
+    def _hook_on_channel_opened(self) -> None:
+        if not isinstance(self._channel, Channel):
+            raise RuntimeError("channel not initialized")
+
+        self._on_event_bus_channel_opened(self._channel)
+
+    def _hook_on_cancelled(self) -> None:
+        self._on_event_bus_consumer_cancel()
+
+
+class EventReciever:
+    def __init__(self, event_key: str) -> None:
+        self._event_key = event_key
+        self._event_queue = EventQueue()
+        self._reciever_id = uuid4()
+
+    @property
+    def event_key(self) -> str:
+        return self._event_key
+
+    @property
+    def event_queue(self) -> EventQueue:
+        return self._event_queue
+
+    @property
+    def reciever_id(self) -> UUID:
+        return self._reciever_id
+
+    async def wait_once(self, timeout: float) -> Event:
+        return await wait_for(fut=self._event_queue.get(), timeout=timeout)
+
+    async def subscribe(self) -> AsyncGenerator[Event, None]:
+        while True:
+            yield await self._event_queue.get()
+
+
+class EventBus(AbstractRabbitMQService, AsyncEventLoopMixin):
+    EVENT_BUS_EXCHANGE = "event_bus"
+
+    @classmethod
+    def set_event_bus_exchange(cls, name: str) -> None:
+        cls.EVENT_BUS_EXCHANGE = name
+
+    def __init__(self, remote_logger: BaseRemoteLogSender):
+        self._events: dict[str, dict[UUID, EventQueue]] = {}
+        self._worker_id = uuid4()
+        self._remote_logger = remote_logger
+        self._first_connect = True
+        self._event_reciever_tasks: list[Task[None]] = []
+
+    def initialize(
+        self, connection: AsyncioConnection, channel_pool: ChannelPool
+    ) -> Future[None]:
+        self._connection = connection
+        self._channel_pool = channel_pool
+        self._event_bus_future = self.loop.create_future()
+        self._event_bus_channel_adapter = EventBusChannelAdapter(
+            connection=connection,
+            on_event_bus_channel_opened=self._declare_exchange,
+            on_event_bus_consumer_cancel=self._register_consumer,
+        )
+
+        self._event_bus_channel_adapter.open().add_done_callback(
+            self._on_event_bus_channel_open_done
+        )
+
+        return cast(Future[None], self._event_bus_future)
+
+    def close(self) -> Future[None]:
+        close_future = self.loop.create_future()
+
+        close_future.set_result(None)
+
+        return cast(Future[None], close_future)
+
+    def _on_event_bus_channel_open_done(self, future: Future[UUID]) -> None:
+        if future.cancelled():
+            if not self._event_bus_future.done():
+                self._event_bus_future.cancel()
+
+            return
+
+        exception = future.exception()
+
+        if exception is not None:
+            if not self._event_bus_future.done():
+                self._event_bus_future.set_exception(exception)
+
+            return
+
+        self._event_bus_channel_id = future.result()
+
+    def create_event_reciever(
+        self,
+        event_key: str,
+    ) -> EventReciever:
+        event_reciever = EventReciever(event_key=event_key)
+
+        if event_key not in self._events:
+            self._events[event_key] = {
+                event_reciever.reciever_id: event_reciever.event_queue
+            }
+        else:
+            self._events[event_key][event_reciever.reciever_id] = Queue()
+
+        return event_reciever
+
+    async def emit(
+        self,
+        event_key: str,
+        payload: Any | None = None,
+        event_reciever: EventReciever | None = None,
+    ) -> None:
+        await self._submit_event(
+            event_key=event_key,
+            event_reciever=event_reciever,
+            payload=payload,
+        )
+
+    async def _submit_event(
+        self,
+        event_key: str,
+        globally_emitted: bool = False,
+        event_reciever: EventReciever | None = None,
+        payload: Any | None = None,
+    ) -> None:
+        event = Event(payload)
+        event_queues = self._events.get(event_key)
+
+        if event_queues is None:
+            if not globally_emitted:
+                global_event = GlobalEvent(
+                    event_key=event_key,
+                    payload=payload,
+                )
+
+                await self._emit_globally(global_event)
+
+            return
+
+        if event_reciever is not None:
+            event_queue = event_queues.get(event_reciever.reciever_id)
+
+            if event_queue is None:
+                return
+
+            return await event_queue.put(event)
+
+        for event_queue in event_queues.values():
+            await event_queue.put(event)
+
+    async def _emit_globally(self, global_event: GlobalEvent) -> None:
+        if self._channel_pool is None:
+            self._remote_logger.error(
+                message="channel pool not initialized to publish to global event recievers",
+                tags=["event_bus", "event_pool_not_initialized"],
+            )
+
+            return
+
+        try:
+            with await self._channel_pool.get() as channel:
+                channel.basic_publish(
+                    exchange=self.EVENT_BUS_EXCHANGE,
+                    routing_key="IRRELEVANT",
+                    body=global_event.model_dump_json().encode(),
+                )
+        except:
+            self._remote_logger.exception(
+                message="unable to publish event",
+                tags=["event_bus", "global_event_publishing_error"],
+            )
+
+    async def remove(self, event_reciever: EventReciever) -> None:
+        if (
+            event_reciever.event_key not in self._events
+            or event_reciever.reciever_id
+            not in self._events[event_reciever.event_key]
+        ):
+            return
+
+        del self._events[event_reciever.event_key][event_reciever.reciever_id]
+
+        if not self._events[event_reciever.event_key]:
+            del self._events[event_reciever.event_key]
+
+    def _declare_exchange(self, channel: Channel) -> None:
+        self._channel = channel
+
+        self._channel.exchange_declare(
+            exchange=self.EVENT_BUS_EXCHANGE,
+            exchange_type=ExchangeType.fanout,
+            auto_delete=True,
+            callback=self._on_exchange_declared,
+        )
+
+    def _on_exchange_declared(self, method: Method) -> None:
+        del method
+
+        self._declare_queue()
+
+    def _declare_queue(self) -> None:
+        self._consumer_queue = self._unique_consumer_queue()
+
+        try:
+            self._channel.queue_declare(
+                queue=self._consumer_queue,
+                auto_delete=True,
+                callback=self._on_queue_declared,
+            )
+        except Exception as e:
+            if not self._event_bus_future.done():
+                self._event_bus_future.set_exception(e)
+            else:
+                self._remote_logger.exception(
+                    message=f"unable to declare queue {self._consumer_queue} for event bus",
+                    tags=["event_bus", "queue_declaration_error"],
+                )
+
+    def _on_queue_declared(self, method: Method) -> None:
+        del method
+
+        try:
+            self._channel.queue_bind(
+                queue=self._consumer_queue,
+                exchange=self.EVENT_BUS_EXCHANGE,
+                callback=self._on_queue_bound,
+            )
+        except Exception as e:
+            if not self._event_bus_future.done():
+                self._event_bus_future.set_exception(e)
+            else:
+                self._remote_logger.exception(
+                    message="unable to bind queue to exchange",
+                    tags=["event_bus", "queue_binding_error"],
+                )
+
+    def _on_queue_bound(self, method: Method) -> None:
+        del method
+
+        self._register_consumer()
+
+    def _register_consumer(self) -> None:
+        try:
+            self._channel.basic_consume(
+                queue=self._consumer_queue,
+                on_message_callback=self._on_message_recieved,
+                auto_ack=True,
+                callback=self._on_consumer_registered,
+            )
+        except Exception as e:
+            if not self._event_bus_future.done():
+                self._event_bus_future.set_exception(e)
+            else:
+                self._remote_logger.exception(
+                    message=f"unable to start consuming {self._consumer_queue}",
+                    tags=["event_bus", "consumer_registration_error"],
+                )
+
+    def _on_consumer_registered(self, method: Method) -> None:
+        del method
+
+        if not self._event_bus_future.done():
+            self._event_bus_future.set_result(None)
+
+    def _on_message_recieved(
+        self,
+        channel: Channel,
+        method: Basic.Deliver,
+        properties: BasicProperties,
+        body: bytes,
+    ) -> None:
+        del channel, method, properties
+
+        try:
+            global_event = GlobalEvent.model_validate_json(body)
+        except:
+            self._remote_logger.exception(
+                message="event not deserializable",
+                tags=["event_bus", "event_deserialization_error"],
+            )
+
+            return
+
+        event_reciever_task = self.loop.create_task(
+            self._submit_event(
+                event_key=global_event.event_key,
+                globally_emitted=True,
+                payload=global_event.payload,
+            )
+        )
+
+        event_reciever_task.add_done_callback(self._on_event_emit)
+        self._event_reciever_tasks.append(event_reciever_task)
+
+    def _on_event_emit(self, task: Task[None]) -> None:
+        if task in self._event_reciever_tasks:
+            self._event_reciever_tasks.remove(task)
+
+        if task.cancelled():
+            return
+
+        e = task.exception()
+
+        if e is not None:
+            self._remote_logger.error(
+                message="unable to emit from global event",
+                tags=["event_bus", "emit_event_error", task.get_name()],
+                exception=e,
+            )
+
+    def _unique_consumer_queue(self) -> str:
+        return f"{self.EVENT_BUS_EXCHANGE}.worker.{self._worker_id.hex}"
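
The new EventBus keeps per-key queues for local EventReciever instances; emit() delivers to local receivers when any exist for the key, and otherwise publishes a GlobalEvent through the event_bus fanout exchange so other workers can pick it up from their own auto-delete queues (named after each worker id). A rough local-usage sketch, assuming the service has already been initialized by the library's RabbitMQ wiring and that remote_logger is an existing BaseRemoteLogSender:

    event_bus = EventBus(remote_logger=remote_logger)

    receiver = event_bus.create_event_reciever(event_key="user.created")

    await event_bus.emit(event_key="user.created", payload={"id": 42})

    event = await receiver.wait_once(timeout=5.0)  # -> Event(payload={"id": 42})

    async for event in receiver.subscribe():       # or stream events as they arrive
        ...
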
@@ -4,6 +4,8 @@ from typing import Any
 from fastapi import status
 from typing_extensions import Self
 
+from .enums import ServiceType
+
 __all__ = [
     "BadGateway",
     "BadRequest",
@@ -437,11 +439,6 @@ class KafkaDisconnectedError(Exception):
     pass
 
 
-class ServiceType(Enum):
-    HTTP = 0
-    RABBIT_MQ = 1
-
-
 class GenericServiceExceptionFactory:
     def __init__(
         self,
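
The two hunks above relocate ServiceType out of the exceptions module: the enum definition is removed there and replaced by an import from the new enums module. The old import path presumably keeps working because of that re-import, but the canonical location now appears to be:

    from qena_shared_lib.enums import ServiceType        # new location in 0.1.20
    # from qena_shared_lib.exceptions import ServiceType  # pre-0.1.20 location
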
@@ -1,3 +1,5 @@
+from pydantic import Field
+
 from ._base import (
     ROUTE_HANDLER_ATTRIBUTE,
     APIRouter,
@@ -43,6 +45,18 @@ from ._exception_handlers import (
     HTTPServiceExceptionHandler,
     RequestValidationErrorHandler,
 )
+from ._request import (
+    CamelCaseRequest,
+    InboundRequest,
+    OutboundRequest,
+    SnakeCaseRequest,
+)
+from ._response import (
+    CamelCaseResponse,
+    InboundResponse,
+    OutboundResponse,
+    SnakeCaseRespose,
+)
 
 __all__ = [
     "AbstractHttpExceptionHandler",
@@ -50,10 +64,13 @@ __all__ = [
     "APIRouter",
     "BackgroundTasks",
     "Body",
+    "CamelCaseRequest",
+    "CamelCaseResponse",
     "ControllerBase",
     "Cookie",
     "delete",
     "Depends",
+    "Field",
     "File",
     "FileResponse",
     "Form",
@@ -64,8 +81,12 @@ __all__ = [
     "HTTPException",
     "HttpGeneralExceptionHandler",
     "HTTPServiceExceptionHandler",
+    "InboundRequest",
+    "InboundResponse",
     "JSONResponse",
     "options",
+    "OutboundRequest",
+    "OutboundResponse",
     "patch",
     "Path",
     "PlainTextResponse",
@@ -80,6 +101,8 @@ __all__ = [
     "ROUTE_HANDLER_ATTRIBUTE",
     "RouteHandlerMetadata",
     "Security",
+    "SnakeCaseRequest",
+    "SnakeCaseRespose",
     "status",
     "StreamingResponse",
     "trace",
@@ -0,0 +1,24 @@
+from ..alias import CamelCaseAliasedBaseModel, SnakeCaseAliasedBaseModel
+
+__all__ = [
+    "CamelCaseRequest",
+    "InboundRequest",
+    "OutboundRequest",
+    "SnakeCaseRequest",
+]
+
+
+class SnakeCaseRequest(SnakeCaseAliasedBaseModel):
+    pass
+
+
+class CamelCaseRequest(CamelCaseAliasedBaseModel):
+    pass
+
+
+class InboundRequest(CamelCaseAliasedBaseModel):
+    pass
+
+
+class OutboundRequest(CamelCaseAliasedBaseModel):
+    pass
@@ -0,0 +1,24 @@
+from ..alias import CamelCaseAliasedBaseModel, SnakeCaseAliasedBaseModel
+
+__all__ = [
+    "CamelCaseResponse",
+    "InboundResponse",
+    "OutboundResponse",
+    "SnakeCaseRespose",
+]
+
+
+class SnakeCaseRespose(SnakeCaseAliasedBaseModel):
+    pass
+
+
+class CamelCaseResponse(CamelCaseAliasedBaseModel):
+    pass
+
+
+class InboundResponse(CamelCaseAliasedBaseModel):
+    pass
+
+
+class OutboundResponse(CamelCaseAliasedBaseModel):
+    pass
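
The last two hunks add thin request/response marker models on top of the aliased base models (the exported name SnakeCaseRespose is spelled that way in the package), and the earlier http __init__ hunks re-export them alongside pydantic's Field. A minimal sketch, with made-up models, of how they are presumably meant to be subclassed:

    from qena_shared_lib.http import CamelCaseRequest, OutboundResponse

    class CreateUserRequest(CamelCaseRequest):
        first_name: str   # accepts "firstName" on input
        last_name: str

    class CreateUserResponse(OutboundResponse):
        user_id: int      # dumps as "userId" with by_alias=True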