qena-shared-lib 0.1.17__py3-none-any.whl → 0.1.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. qena_shared_lib/__init__.py +20 -2
  2. qena_shared_lib/alias.py +27 -0
  3. qena_shared_lib/application.py +4 -4
  4. qena_shared_lib/background.py +9 -7
  5. qena_shared_lib/cache.py +61 -0
  6. qena_shared_lib/enums.py +8 -0
  7. qena_shared_lib/eventbus.py +373 -0
  8. qena_shared_lib/exception_handling.py +409 -0
  9. qena_shared_lib/exceptions.py +167 -57
  10. qena_shared_lib/http/__init__.py +110 -0
  11. qena_shared_lib/{http.py → http/_base.py} +36 -36
  12. qena_shared_lib/http/_exception_handlers.py +202 -0
  13. qena_shared_lib/http/_request.py +24 -0
  14. qena_shared_lib/http/_response.py +24 -0
  15. qena_shared_lib/kafka/__init__.py +21 -0
  16. qena_shared_lib/kafka/_base.py +233 -0
  17. qena_shared_lib/kafka/_consumer.py +597 -0
  18. qena_shared_lib/kafka/_exception_handlers.py +124 -0
  19. qena_shared_lib/kafka/_producer.py +133 -0
  20. qena_shared_lib/logging.py +17 -13
  21. qena_shared_lib/mongodb.py +575 -0
  22. qena_shared_lib/rabbitmq/__init__.py +6 -6
  23. qena_shared_lib/rabbitmq/_base.py +68 -132
  24. qena_shared_lib/rabbitmq/_channel.py +2 -4
  25. qena_shared_lib/rabbitmq/_exception_handlers.py +69 -142
  26. qena_shared_lib/rabbitmq/_listener.py +245 -180
  27. qena_shared_lib/rabbitmq/_publisher.py +5 -5
  28. qena_shared_lib/rabbitmq/_rpc_client.py +21 -22
  29. qena_shared_lib/rabbitmq/message/__init__.py +19 -0
  30. qena_shared_lib/rabbitmq/message/_inbound.py +13 -0
  31. qena_shared_lib/rabbitmq/message/_outbound.py +13 -0
  32. qena_shared_lib/redis.py +47 -0
  33. qena_shared_lib/remotelogging/_base.py +34 -28
  34. qena_shared_lib/remotelogging/logstash/_base.py +3 -2
  35. qena_shared_lib/remotelogging/logstash/_http_sender.py +2 -4
  36. qena_shared_lib/remotelogging/logstash/_tcp_sender.py +2 -2
  37. qena_shared_lib/scheduler.py +24 -15
  38. qena_shared_lib/security.py +39 -32
  39. qena_shared_lib/sync.py +91 -0
  40. qena_shared_lib/utils.py +13 -11
  41. {qena_shared_lib-0.1.17.dist-info → qena_shared_lib-0.1.19.dist-info}/METADATA +395 -32
  42. qena_shared_lib-0.1.19.dist-info/RECORD +50 -0
  43. qena_shared_lib-0.1.19.dist-info/WHEEL +4 -0
  44. qena_shared_lib/exception_handlers.py +0 -235
  45. qena_shared_lib-0.1.17.dist-info/RECORD +0 -31
  46. qena_shared_lib-0.1.17.dist-info/WHEEL +0 -4
qena_shared_lib/__init__.py
@@ -1,11 +1,22 @@
 try:
-    from . import rabbitmq, scheduler, security
+    from . import (
+        cache,
+        eventbus,
+        kafka,
+        mongodb,
+        rabbitmq,
+        redis,
+        scheduler,
+        security,
+        sync,
+    )
 except NameError:
     pass
 from . import (
     application,
     background,
     dependencies,
+    enums,
     exceptions,
     http,
     logging,
@@ -16,13 +27,20 @@ from . import (
 __all__ = [
     "application",
     "background",
+    "cache",
     "dependencies",
+    "enums",
+    "eventbus",
     "exceptions",
     "http",
+    "kafka",
     "logging",
-    "remotelogging",
+    "mongodb",
     "rabbitmq",
+    "redis",
+    "remotelogging",
     "scheduler",
     "security",
+    "sync",
     "utils",
 ]
qena_shared_lib/alias.py
@@ -0,0 +1,27 @@
+from pydantic import BaseModel, ConfigDict
+from pydantic.alias_generators import to_camel, to_snake
+
+__all__ = [
+    "CamelCaseAliasedBaseModel",
+    "SnakeCaseAliasedBaseModel",
+]
+
+
+class CamelCaseAliasedBaseModel(BaseModel):
+    model_config = ConfigDict(
+        alias_generator=to_camel,
+        populate_by_name=True,
+        arbitrary_types_allowed=True,
+        extra="ignore",
+        strict=False,
+    )
+
+
+class SnakeCaseAliasedBaseModel(BaseModel):
+    model_config = ConfigDict(
+        alias_generator=to_snake,
+        populate_by_name=True,
+        arbitrary_types_allowed=True,
+        extra="ignore",
+        strict=False,
+    )
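
The two base models above differ only in their alias generator. A minimal sketch of how CamelCaseAliasedBaseModel behaves under Pydantic v2 (the UserProfile model is hypothetical):

from qena_shared_lib.alias import CamelCaseAliasedBaseModel

class UserProfile(CamelCaseAliasedBaseModel):
    user_id: int
    display_name: str

# camelCase keys from a JSON payload validate against the snake_case fields
# (populate_by_name=True also accepts the snake_case names directly)...
profile = UserProfile.model_validate({"userId": 1, "displayName": "Abebe"})

# ...and by_alias=True serializes back to camelCase
assert profile.model_dump(by_alias=True) == {"userId": 1, "displayName": "Abebe"}
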
qena_shared_lib/application.py
@@ -7,13 +7,13 @@ from punq import Container, Scope, empty
 from starlette.types import Lifespan
 from typing_extensions import Self
 
-from .exception_handlers import (
+from .http import ControllerBase
+from .http._exception_handlers import (
     AbstractHttpExceptionHandler,
-    GeneralHttpExceptionHandler,
+    HttpGeneralExceptionHandler,
     HTTPServiceExceptionHandler,
     RequestValidationErrorHandler,
 )
-from .http import ControllerBase
 
 __all__ = [
     "Builder",
@@ -127,7 +127,7 @@ class Builder:
 
     def with_default_exception_handlers(self) -> Self:
         self.with_exception_handlers(
-            GeneralHttpExceptionHandler,
+            HttpGeneralExceptionHandler,
             HTTPServiceExceptionHandler,
             RequestValidationErrorHandler,
         )
qena_shared_lib/background.py
@@ -9,7 +9,7 @@ from uuid import uuid4
 from prometheus_client import Enum as PrometheusEnum
 from starlette.background import BackgroundTask
 
-from .logging import LoggerProvider
+from .logging import LoggerFactory
 from .remotelogging import BaseRemoteLogSender
 from .utils import AsyncEventLoopMixin
 
@@ -20,7 +20,7 @@ __all__ = [
 
 
 class Background(AsyncEventLoopMixin):
-    BACKGROUND_RUNNER_STATE = PrometheusEnum(
+    _BACKGROUND_RUNNER_STATE = PrometheusEnum(
         name="background_runner_state",
         documentation="Background runner state",
         states=["running", "stopped"],
@@ -34,7 +34,7 @@ class Background(AsyncEventLoopMixin):
         self._started = False
         self._stopped = False
         self._remote_logger = remote_logger
-        self._logger = LoggerProvider.default().get_logger("backgroud")
+        self._logger = LoggerFactory.get_logger("background")
         self._tasks: dict[str, Task[Any]] = {}
 
     async def _task_manager(
@@ -54,8 +54,10 @@
 
             await self._tasks[task_id]
         except Exception:
-            self._remote_logger.error(
-                "exception occured when running background task {task.func.__name__} with id {task_id}"
+            self._remote_logger.exception(
+                message=f"exception occured while running background task {task.func.__name__} with id {task_id}",
+                tags=["background", "task_execution_failed", task_id],
+                extra={"serviceType": "background", "taskId": task_id},
             )
         finally:
             self._logger.info("finished running %s", task.func.__name__)
@@ -91,7 +93,7 @@
             raise RuntimeError("background runner already running")
 
         self.loop.create_task(self._run_tasks())
-        self.BACKGROUND_RUNNER_STATE.state("running")
+        self._BACKGROUND_RUNNER_STATE.state("running")
 
         self._started = True
 
@@ -101,7 +103,7 @@
 
         self._stopped = True
         self._queue.put_nowait((None, None))
-        self.BACKGROUND_RUNNER_STATE.state("stopped")
+        self._BACKGROUND_RUNNER_STATE.state("stopped")
 
     def is_alive(self, task_id: str) -> bool:
         if task_id in self._tasks and not self._tasks[task_id].done():
qena_shared_lib/cache.py
@@ -0,0 +1,61 @@
+from typing import Any, TypeVar, cast
+
+from redis.asyncio import Redis
+
+from .alias import CamelCaseAliasedBaseModel
+from .redis import RedisDependent
+
+__all__ = [
+    "CachedObject",
+    "CacheManager",
+]
+
+
+CO = TypeVar("CO", bound="CachedObject")
+
+
+class CachedObject(CamelCaseAliasedBaseModel):
+    @classmethod
+    def from_raw_value(
+        cls, obj: Any, *args: Any, **kwargs: Any
+    ) -> "CachedObject":
+        cache_object = cls.model_validate_json(json_data=obj, *args, **kwargs)
+
+        return cast(CachedObject, cache_object)
+
+    @classmethod
+    def redis_key(cls) -> str:
+        return cls.__name__
+
+
+class CacheManager(RedisDependent):
+    def attach(self, redis_client: Redis) -> None:
+        self._redis_client = redis_client
+
+    @property
+    def redis(self) -> Redis:
+        return self._redis_client
+
+    async def get(self, cached_object_type: type[CO]) -> CO | None:
+        cache_object = await self._redis_client.get(
+            cached_object_type.redis_key()
+        )
+
+        if cache_object is None:
+            return None
+
+        return cast(CO, cached_object_type.from_raw_value(obj=cache_object))
+
+    async def set(self, cache_object: CachedObject) -> None:
+        if not isinstance(cache_object, CachedObject):
+            raise TypeError(
+                f"object is not type of `CachedObject`, got `{cache_object.__class__.__name__}`"
+            )
+
+        await self._redis_client.set(
+            name=cache_object.redis_key(),
+            value=cache_object.model_dump_json(),
+        )
+
+    async def unset(self, cached_object_type: type[CO]) -> None:
+        await self._redis_client.delete(cached_object_type.redis_key())
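
CacheManager stores one JSON value per CachedObject subclass, keyed by the class name via redis_key(). A minimal usage sketch; SessionInfo is hypothetical, and `cache` is assumed to have had a Redis client attached via attach():

from qena_shared_lib.cache import CachedObject, CacheManager

class SessionInfo(CachedObject):  # hypothetical cached model
    user_id: int
    token: str

async def demo(cache: CacheManager) -> None:
    # serialized with model_dump_json() under the key "SessionInfo"
    await cache.set(SessionInfo(user_id=1, token="abc"))

    session = await cache.get(SessionInfo)  # None when the key is absent

    if session is not None:
        print(session.token)

    await cache.unset(SessionInfo)
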
qena_shared_lib/enums.py
@@ -0,0 +1,8 @@
+from enum import Enum
+
+__all__ = ["ServiceType"]
+
+
+class ServiceType(Enum):
+    HTTP = 0
+    RABBIT_MQ = 1
qena_shared_lib/eventbus.py
@@ -0,0 +1,373 @@
+from asyncio import Future, Queue, Task, wait_for
+from dataclasses import dataclass
+from typing import Any, AsyncGenerator, Callable, cast
+from uuid import UUID, uuid4
+
+from pika.adapters.asyncio_connection import AsyncioConnection
+from pika.channel import Channel
+from pika.exchange_type import ExchangeType
+from pika.frame import Method
+from pika.spec import Basic, BasicProperties
+from pydantic_core import to_json
+
+from qena_shared_lib.rabbitmq import (
+    AbstractRabbitMQService,
+    BaseChannel,
+    ChannelPool,
+)
+
+from .alias import CamelCaseAliasedBaseModel
+from .remotelogging import BaseRemoteLogSender
+from .utils import AsyncEventLoopMixin
+
+__all__ = [
+    "Event",
+    "EventBus",
+    "EventBusChannelAdapter",
+    "EventQueue",
+    "EventReciever",
+    "GlobalEvent",
+]
+
+
+@dataclass
+class Event:
+    payload: Any | None = None
+
+    def to_json(self) -> str:
+        return cast(str, to_json(self.payload).decode())
+
+
+EventQueue = Queue[Event]
+
+
+class GlobalEvent(CamelCaseAliasedBaseModel):
+    event_key: str
+    payload: Any | None = None
+
+
+class EventBusChannelAdapter(BaseChannel):
+    def __init__(
+        self,
+        connection: AsyncioConnection,
+        on_event_bus_channel_opened: Callable[[Channel], None],
+        on_event_bus_consumer_cancel: Callable[[], None],
+    ):
+        super().__init__(connection=connection, failed_reopen_threshold=None)
+
+        self._on_event_bus_channel_opened = on_event_bus_channel_opened
+        self._on_event_bus_consumer_cancel = on_event_bus_consumer_cancel
+
+    def _hook_on_channel_opened(self) -> None:
+        if not isinstance(self._channel, Channel):
+            raise RuntimeError("channel not initialized")
+
+        self._on_event_bus_channel_opened(self._channel)
+
+    def _hook_on_cancelled(self) -> None:
+        self._on_event_bus_consumer_cancel()
+
+
+class EventReciever:
+    def __init__(self, event_key: str) -> None:
+        self._event_key = event_key
+        self._event_queue = EventQueue()
+        self._reciever_id = uuid4()
+
+    @property
+    def event_key(self) -> str:
+        return self._event_key
+
+    @property
+    def event_queue(self) -> EventQueue:
+        return self._event_queue
+
+    @property
+    def reciever_id(self) -> UUID:
+        return self._reciever_id
+
+    async def wait_once(self, timeout: float) -> Event:
+        return await wait_for(fut=self._event_queue.get(), timeout=timeout)
+
+    async def subscribe(self) -> AsyncGenerator[Event, None]:
+        while True:
+            yield await self._event_queue.get()
+
+
+class EventBus(AbstractRabbitMQService, AsyncEventLoopMixin):
+    EVENT_BUS_EXCHANGE = "event_bus"
+
+    @classmethod
+    def set_event_bus_exchange(cls, name: str) -> None:
+        cls.EVENT_BUS_EXCHANGE = name
+
+    def __init__(self, remote_logger: BaseRemoteLogSender):
+        self._events: dict[str, dict[UUID, EventQueue]] = {}
+        self._worker_id = uuid4()
+        self._remote_logger = remote_logger
+        self._first_connect = True
+        self._event_reciever_tasks: list[Task[None]] = []
+
+    def initialize(
+        self, connection: AsyncioConnection, channel_pool: ChannelPool
+    ) -> Future[None]:
+        self._connection = connection
+        self._channel_pool = channel_pool
+        self._event_bus_future = self.loop.create_future()
+        self._event_bus_channel_adapter = EventBusChannelAdapter(
+            connection=connection,
+            on_event_bus_channel_opened=self._declare_exchange,
+            on_event_bus_consumer_cancel=self._register_consumer,
+        )
+
+        self._event_bus_channel_adapter.open().add_done_callback(
+            self._on_event_bus_channel_open_done
+        )
+
+        return cast(Future[None], self._event_bus_future)
+
+    def close(self) -> Future[None]:
+        close_future = self.loop.create_future()
+
+        close_future.set_result(None)
+
+        return cast(Future[None], close_future)
+
+    def _on_event_bus_channel_open_done(self, future: Future[UUID]) -> None:
+        if future.cancelled():
+            if not self._event_bus_future.done():
+                self._event_bus_future.cancel()
+
+            return
+
+        exception = future.exception()
+
+        if exception is not None:
+            if not self._event_bus_future.done():
+                self._event_bus_future.set_exception(exception)
+
+            return
+
+        self._event_bus_channel_id = future.result()
+
+    def create_event_reciever(
+        self,
+        event_key: str,
+    ) -> EventReciever:
+        event_reciever = EventReciever(event_key=event_key)
+
+        if event_key not in self._events:
+            self._events[event_key] = {
+                event_reciever.reciever_id: event_reciever.event_queue
+            }
+        else:
+            self._events[event_key][event_reciever.reciever_id] = Queue()
+
+        return event_reciever
+
+    async def emit(
+        self,
+        event_key: str,
+        payload: Any | None = None,
+        event_reciever: EventReciever | None = None,
+    ) -> None:
+        await self._submit_event(
+            event_key=event_key,
+            event_reciever=event_reciever,
+            payload=payload,
+        )
+
+    async def _submit_event(
+        self,
+        event_key: str,
+        globally_emitted: bool = False,
+        event_reciever: EventReciever | None = None,
+        payload: Any | None = None,
+    ) -> None:
+        event = Event(payload)
+        event_queues = self._events.get(event_key)
+
+        if event_queues is None:
+            if not globally_emitted:
+                global_event = GlobalEvent(
+                    event_key=event_key,
+                    payload=payload,
+                )
+
+                await self._emit_globally(global_event)
+
+            return
+
+        if event_reciever is not None:
+            event_queue = event_queues.get(event_reciever.reciever_id)
+
+            if event_queue is None:
+                return
+
+            return await event_queue.put(event)
+
+        for event_queue in event_queues.values():
+            await event_queue.put(event)
+
+    async def _emit_globally(self, global_event: GlobalEvent) -> None:
+        if self._channel_pool is None:
+            self._remote_logger.error(
+                message="channel pool not initialized to publish to global event recievers",
+                tags=["event_bus", "event_pool_not_initialized"],
+            )
+
+            return
+
+        try:
+            with await self._channel_pool.get() as channel:
+                channel.basic_publish(
+                    exchange=self.EVENT_BUS_EXCHANGE,
+                    routing_key="IRRELEVANT",
+                    body=global_event.model_dump_json().encode(),
+                )
+        except:
+            self._remote_logger.exception(
+                message="unable to publish event",
+                tags=["event_bus", "global_event_publishing_error"],
+            )
+
+    async def remove(self, event_reciever: EventReciever) -> None:
+        if (
+            event_reciever.event_key not in self._events
+            or event_reciever.reciever_id
+            not in self._events[event_reciever.event_key]
+        ):
+            return
+
+        del self._events[event_reciever.event_key][event_reciever.reciever_id]
+
+        if not self._events[event_reciever.event_key]:
+            del self._events[event_reciever.event_key]
+
+    def _declare_exchange(self, channel: Channel) -> None:
+        self._channel = channel
+
+        self._channel.exchange_declare(
+            exchange=self.EVENT_BUS_EXCHANGE,
+            exchange_type=ExchangeType.fanout,
+            auto_delete=True,
+            callback=self._on_exchange_declared,
+        )
+
+    def _on_exchange_declared(self, method: Method) -> None:
+        del method
+
+        self._declare_queue()
+
+    def _declare_queue(self) -> None:
+        self._consumer_queue = self._unique_consumer_queue()
+
+        try:
+            self._channel.queue_declare(
+                queue=self._consumer_queue,
+                auto_delete=True,
+                callback=self._on_queue_declared,
+            )
+        except Exception as e:
+            if not self._event_bus_future.done():
+                self._event_bus_future.set_exception(e)
+            else:
+                self._remote_logger.exception(
+                    message=f"unable to declare queue {self._consumer_queue} for event bus",
+                    tags=["event_bus", "queue_declaration_error"],
+                )
+
+    def _on_queue_declared(self, method: Method) -> None:
+        del method
+
+        try:
+            self._channel.queue_bind(
+                queue=self._consumer_queue,
+                exchange=self.EVENT_BUS_EXCHANGE,
+                callback=self._on_queue_bound,
+            )
+        except Exception as e:
+            if not self._event_bus_future.done():
+                self._event_bus_future.set_exception(e)
+            else:
+                self._remote_logger.exception(
+                    message="unable to bind queue to exchange",
+                    tags=["event_bus", "queue_binding_error"],
+                )
+
+    def _on_queue_bound(self, method: Method) -> None:
+        del method
+
+        self._register_consumer()
+
+    def _register_consumer(self) -> None:
+        try:
+            self._channel.basic_consume(
+                queue=self._consumer_queue,
+                on_message_callback=self._on_message_recieved,
+                auto_ack=True,
+                callback=self._on_consumer_registered,
+            )
+        except Exception as e:
+            if not self._event_bus_future.done():
+                self._event_bus_future.set_exception(e)
+            else:
+                self._remote_logger.exception(
+                    message=f"unable to start consuming {self._consumer_queue}",
+                    tags=["event_bus", "consumer_registration_error"],
+                )
+
+    def _on_consumer_registered(self, method: Method) -> None:
+        del method
+
+        if not self._event_bus_future.done():
+            self._event_bus_future.set_result(None)
+
+    def _on_message_recieved(
+        self,
+        channel: Channel,
+        method: Basic.Deliver,
+        properties: BasicProperties,
+        body: bytes,
+    ) -> None:
+        del channel, method, properties
+
+        try:
+            global_event = GlobalEvent.model_validate_json(body)
+        except:
+            self._remote_logger.exception(
+                message="event not deserializable",
+                tags=["event_bus", "event_deserialization_error"],
+            )
+
+            return
+
+        event_reciever_task = self.loop.create_task(
+            self._submit_event(
+                event_key=global_event.event_key,
+                globally_emitted=True,
+                payload=global_event.payload,
+            )
+        )
+
+        event_reciever_task.add_done_callback(self._on_event_emit)
+        self._event_reciever_tasks.append(event_reciever_task)
+
+    def _on_event_emit(self, task: Task[None]) -> None:
+        if task in self._event_reciever_tasks:
+            self._event_reciever_tasks.remove(task)
+
+        if task.cancelled():
+            return
+
+        e = task.exception()
+
+        if e is not None:
+            self._remote_logger.error(
+                message="unable to emit from global event",
+                tags=["event_bus", "emit_event_error", task.get_name()],
+                exception=e,
+            )
+
+    def _unique_consumer_queue(self) -> str:
+        return f"{self.EVENT_BUS_EXCHANGE}.worker.{self._worker_id.hex}"
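
EventBus delivers an event to local EventReciever queues when one is registered for the key; otherwise it wraps the payload in a GlobalEvent and fans it out to other workers through the event_bus exchange, where each worker consumes from its own per-worker queue. A minimal local-subscription sketch; `bus` is assumed to be an EventBus instance already initialized by the RabbitMQ service wiring:

from qena_shared_lib.eventbus import EventBus

async def demo(bus: EventBus) -> None:
    # registers a local queue for the "user.created" key
    reciever = bus.create_event_reciever(event_key="user.created")

    # delivered locally because a reciever exists for this key;
    # with no local reciever it would be published globally instead
    await bus.emit(event_key="user.created", payload={"id": 1})

    # wait for a single event; raises asyncio.TimeoutError on timeout
    event = await reciever.wait_once(timeout=5.0)
    print(event.payload)

    # detach the reciever so the key can fan out globally again
    await bus.remove(reciever)
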