qena-shared-lib 0.1.16__py3-none-any.whl → 0.1.18__py3-none-any.whl

This diff shows the changes between publicly available package versions as published to a supported public registry. It is provided for informational purposes only.
Files changed (33)
  1. qena_shared_lib/__init__.py +3 -2
  2. qena_shared_lib/application.py +4 -4
  3. qena_shared_lib/background.py +9 -7
  4. qena_shared_lib/exception_handling.py +409 -0
  5. qena_shared_lib/exceptions.py +170 -57
  6. qena_shared_lib/http/__init__.py +90 -0
  7. qena_shared_lib/{http.py → http/_base.py} +36 -36
  8. qena_shared_lib/http/_exception_handlers.py +202 -0
  9. qena_shared_lib/kafka/__init__.py +21 -0
  10. qena_shared_lib/kafka/_base.py +233 -0
  11. qena_shared_lib/kafka/_consumer.py +597 -0
  12. qena_shared_lib/kafka/_exception_handlers.py +124 -0
  13. qena_shared_lib/kafka/_producer.py +133 -0
  14. qena_shared_lib/logging.py +17 -13
  15. qena_shared_lib/rabbitmq/__init__.py +4 -6
  16. qena_shared_lib/rabbitmq/_base.py +68 -132
  17. qena_shared_lib/rabbitmq/_channel.py +2 -4
  18. qena_shared_lib/rabbitmq/_exception_handlers.py +69 -142
  19. qena_shared_lib/rabbitmq/_listener.py +246 -157
  20. qena_shared_lib/rabbitmq/_publisher.py +5 -5
  21. qena_shared_lib/rabbitmq/_rpc_client.py +21 -22
  22. qena_shared_lib/remotelogging/_base.py +20 -20
  23. qena_shared_lib/remotelogging/logstash/_base.py +2 -2
  24. qena_shared_lib/remotelogging/logstash/_http_sender.py +2 -4
  25. qena_shared_lib/remotelogging/logstash/_tcp_sender.py +2 -2
  26. qena_shared_lib/scheduler.py +24 -15
  27. qena_shared_lib/security.py +39 -32
  28. qena_shared_lib/utils.py +13 -11
  29. {qena_shared_lib-0.1.16.dist-info → qena_shared_lib-0.1.18.dist-info}/METADATA +9 -1
  30. qena_shared_lib-0.1.18.dist-info/RECORD +38 -0
  31. qena_shared_lib/exception_handlers.py +0 -235
  32. qena_shared_lib-0.1.16.dist-info/RECORD +0 -31
  33. {qena_shared_lib-0.1.16.dist-info → qena_shared_lib-0.1.18.dist-info}/WHEEL +0 -0
qena_shared_lib/rabbitmq/_publisher.py CHANGED
@@ -8,14 +8,14 @@ from prometheus_client import Counter
 from pydantic_core import to_json
 
 from ..exceptions import RabbitMQBlockedError
-from ..logging import LoggerProvider
+from ..logging import LoggerFactory
 from ._pool import ChannelPool
 
 __all__ = ["Publisher"]
 
 
 class Publisher:
-    PUBLISHED_MESSAGES = Counter(
+    _PUBLISHED_MESSAGES = Counter(
         name="published_messages",
         documentation="Published messages",
         labelnames=["routing_key", "target"],
@@ -41,7 +41,7 @@ class Publisher:
         self._blocked_connection_check_callback = (
             blocked_connection_check_callback
         )
-        self._logger = LoggerProvider.default().get_logger("rabbitmq.publisher")
+        self._logger = LoggerFactory.get_logger("rabbitmq.publisher")
 
     async def publish_as_arguments(self, *args: Any, **kwargs: Any) -> None:
         await self._get_channel_and_publish({"args": args, "kwargs": kwargs})
@@ -52,7 +52,7 @@
     async def _get_channel_and_publish(self, message: Any) -> None:
         if self._blocked_connection_check_callback():
             raise RabbitMQBlockedError(
-                "rabbitmq broker is not able to accept message right now"
+                "rabbitmq broker is not able to accept message right now for publishing"
             )
 
         with await self._channel_pool.get() as channel:
@@ -72,6 +72,6 @@
             self._routing_key,
             self._target,
         )
-        self.PUBLISHED_MESSAGES.labels(
+        self._PUBLISHED_MESSAGES.labels(
             routing_key=self._routing_key, target=self._target
         ).inc()
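
Note: the publisher changes are mechanical. The Prometheus counter becomes a private class attribute (PUBLISHED_MESSAGES to _PUBLISHED_MESSAGES), the blocked-broker error message is made more specific, and logger acquisition drops the provider indirection. A minimal before/after sketch of the logging call, assuming LoggerFactory.get_logger keeps the name-based signature shown in the diff:

    from qena_shared_lib.logging import LoggerFactory

    # 0.1.16: logger = LoggerProvider.default().get_logger("rabbitmq.publisher")
    # 0.1.18:
    logger = LoggerFactory.get_logger("rabbitmq.publisher")
    logger.info("publisher ready")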
qena_shared_lib/rabbitmq/_rpc_client.py CHANGED
@@ -2,6 +2,7 @@ from asyncio import Future, Lock
 from functools import partial
 from importlib import import_module
 from time import time
+from types import UnionType
 from typing import Any, Callable, Generic, TypeVar
 from uuid import uuid4
 
@@ -21,7 +22,7 @@ from ..exceptions import (
     RabbitMQRpcRequestTimeoutError,
     RabbitMQServiceException,
 )
-from ..logging import LoggerProvider
+from ..logging import LoggerFactory
 from ..utils import AsyncEventLoopMixin, TypeAdapterCache
 from ._pool import ChannelPool
 
@@ -29,30 +30,30 @@ __all__ = ["RpcClient"]
 
 
 class ExitHandler:
-    _exiting = False
-    _rpc_futures: list[Future[Any]] = []
-    _original_exit_handler: Callable[..., None]
+    _EXITING = False
+    _RPC_FUTURES: list[Future[Any]] = []
+    _ORIGINAL_EXIT_HANDLER: Callable[..., None]
 
     @classmethod
     def is_exising(cls) -> bool:
-        return cls._exiting
+        return cls._EXITING
 
     @classmethod
     def add_rpc_future(cls, rpc_future: Future[Any]) -> None:
-        cls._rpc_futures.append(rpc_future)
+        cls._RPC_FUTURES.append(rpc_future)
 
     @classmethod
     def remove_rpc_future(cls, rpc_future: Future[Any]) -> None:
         try:
-            cls._rpc_futures.remove(rpc_future)
+            cls._RPC_FUTURES.remove(rpc_future)
         except:
             pass
 
     @classmethod
     def cancel_futures(cls) -> None:
-        cls._exiting = True
+        cls._EXITING = True
 
-        for rpc_future in cls._rpc_futures:
+        for rpc_future in cls._RPC_FUTURES:
             if not rpc_future.done():
                 rpc_future.cancel()
 
@@ -63,7 +64,7 @@ class ExitHandler:
         except ModuleNotFoundError:
             return
 
-        ExitHandler._original_exit_handler = Server.handle_exit
+        ExitHandler._ORIGINAL_EXIT_HANDLER = Server.handle_exit
         Server.handle_exit = ExitHandler.handle_exit
 
     @staticmethod
@@ -73,7 +74,7 @@ class ExitHandler:
     @staticmethod
    def handle_exit(*args: Any, **kwargs: Any) -> None:
         ExitHandler.notify_clients()
-        ExitHandler._original_exit_handler(*args, **kwargs)
+        ExitHandler._ORIGINAL_EXIT_HANDLER(*args, **kwargs)
 
 
 ExitHandler.patch_exit_handler()
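
The ExitHandler above monkey-patches uvicorn's Server.handle_exit so pending RPC futures are cancelled before the original shutdown logic runs. The chaining pattern in isolation (a standalone sketch; AppServer is a stand-in class, not uvicorn's):

    from typing import Any, Callable

    class AppServer:
        @staticmethod
        def handle_exit(*args: Any, **kwargs: Any) -> None:
            print("original shutdown")

    # Keep a reference to the original handler, then install a wrapper
    # that runs cleanup first and delegates to the original.
    _original: Callable[..., None] = AppServer.handle_exit

    def _patched_handle_exit(*args: Any, **kwargs: Any) -> None:
        print("cancel pending futures")  # stands in for ExitHandler.notify_clients()
        _original(*args, **kwargs)

    AppServer.handle_exit = _patched_handle_exit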
@@ -83,17 +84,17 @@ R = TypeVar("R")
 
 
 class RpcClient(Generic[R], AsyncEventLoopMixin):
-    SUCCEEDED_RPC_CALLS = Counter(
+    _SUCCEEDED_RPC_CALLS = Counter(
         name="succeeded_rpc_calls",
         documentation="RPC calls made",
         labelnames=["routing_key", "procedure"],
     )
-    FAILED_RPC_CALL = Counter(
+    _FAILED_RPC_CALL = Counter(
         name="failed_rpc_call",
         documentation="Failed RPC calls",
         labelnames=["routing_key", "procedure", "exception"],
     )
-    RPC_CALL_LATENCY = Summary(
+    _RPC_CALL_LATENCY = Summary(
         name="rpc_call_latency",
         documentation="Time it took for RPC calls",
         labelnames=["routing_key", "procedure"],
@@ -107,7 +108,7 @@ class RpcClient(Generic[R], AsyncEventLoopMixin):
         exchange: str | None = None,
         procedure: str | None = None,
         headers: dict[str, str] | None = None,
-        return_type: type[R] | None = None,
+        return_type: type[R] | UnionType | None = None,
         timeout: float = 15,
     ):
         self._routing_key = routing_key
@@ -127,9 +128,7 @@ class RpcClient(Generic[R], AsyncEventLoopMixin):
         self._rpc_call_start_time: float | None = None
         self._rpc_call_lock = Lock()
         self._rpc_call_pending = False
-        self._logger = LoggerProvider.default().get_logger(
-            "rabbitmq.rpc_client"
-        )
+        self._logger = LoggerFactory.get_logger("rabbitmq.rpc_client")
 
     async def call_with_arguments(self, *args: Any, **kwargs: Any) -> R:
         return await self._get_channel_and_call(
@@ -142,7 +141,7 @@ class RpcClient(Generic[R], AsyncEventLoopMixin):
     async def _get_channel_and_call(self, message: Any) -> R:
         if self._blocked_connection_check_callback():
             raise RabbitMQBlockedError(
-                "rabbitmq broker is not able to accept message right now"
+                "rabbitmq broker is not able to accept message right now for rpc call"
             )
 
         async with self._rpc_call_lock:
@@ -287,17 +286,17 @@ class RpcClient(Generic[R], AsyncEventLoopMixin):
             return
         elif exception is not None:
             self._rpc_future.set_exception(exception)
-            self.FAILED_RPC_CALL.labels(
+            self._FAILED_RPC_CALL.labels(
                 routing_key=self._routing_key,
                 procedure=self._procedure,
                 exception=exception.__class__.__name__,
             ).inc()
         else:
             self._rpc_future.set_result(response)
-            self.SUCCEEDED_RPC_CALLS.labels(
+            self._SUCCEEDED_RPC_CALLS.labels(
                 routing_key=self._routing_key, procedure=self._procedure
             ).inc()
 
-        self.RPC_CALL_LATENCY.labels(
+        self._RPC_CALL_LATENCY.labels(
             routing_key=self._routing_key, procedure=self._procedure
         ).observe((self._rpc_call_start_time or time()) - time())
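
Widening return_type to type[R] | UnionType matters because a PEP 604 union such as int | str is a types.UnionType instance at runtime, not a type, so the 0.1.16 annotation could not express it. A standalone illustration (client wiring omitted; check is a hypothetical helper):

    from types import UnionType

    def check(return_type: type | UnionType | None) -> None:
        print(type(return_type))

    check(dict)       # <class 'type'>: accepted by both annotations
    check(int | str)  # <class 'types.UnionType'>: expressible only in 0.1.18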
qena_shared_lib/remotelogging/_base.py CHANGED
@@ -13,7 +13,7 @@ from traceback import format_exception
 from prometheus_client import Counter
 from prometheus_client import Enum as PrometheusEnum
 
-from ..logging import LoggerProvider
+from ..logging import LoggerFactory
 from ..utils import AsyncEventLoopMixin
 
 __all__ = [
@@ -43,7 +43,7 @@ class RemoteLogRecord:
         self._log_level = log_level
         self._log_logger = log_logger
         self._tags: list[str] | None = None
-        self._labels: dict[str, str] | None = None
+        self._extra: dict[str, str] | None = None
         self._error_type: str | None = None
         self._error_message: str | None = None
         self._error_stack_trace: str | None = None
@@ -74,12 +74,12 @@ class RemoteLogRecord:
         self._tags = tags
 
     @property
-    def labels(self) -> dict[str, str] | None:
-        return self._labels
+    def extra(self) -> dict[str, str] | None:
+        return self._extra
 
-    @labels.setter
-    def labels(self, labels: dict[str, str]) -> None:
-        self._labels = labels
+    @extra.setter
+    def extra(self, extra: dict[str, str]) -> None:
+        self._extra = extra
 
     @property
     def error(self) -> tuple[str | None, str | None, str | None]:
@@ -92,10 +92,10 @@
         if exception.__traceback__ is not None:
             self._error_stack_trace = "".join(format_exception(exception))
 
-        if self._labels is None:
-            self._labels = {}
+        if self._extra is None:
+            self._extra = {}
 
-        self._labels.update(self._extract_exception_cause(exception))
+        self._extra.update(self._extract_exception_cause(exception))
 
     def _extract_exception_cause(
         self, exception: BaseException
@@ -145,7 +145,7 @@
             self._log_level.name,
             self._message,
             self._tags or [],
-            self._labels or {},
+            self._extra or {},
             self._error_type or "None",
             self._error_message or "None",
             f"\n{self._error_stack_trace}"
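
RemoteLogRecord renames its labels property to extra; behavior is otherwise unchanged, including merging exception causes into the same dict in error_from_exception. A hypothetical call-site migration (record construction omitted; the import assumes the package re-exports RemoteLogRecord, as the logstash senders' imports suggest):

    from qena_shared_lib.remotelogging import RemoteLogRecord  # assumed re-export

    def annotate(record: RemoteLogRecord) -> None:
        # 0.1.16: record.labels = {"serviceType": "worker"}
        record.extra = {"serviceType": "worker"}  # str keys and values only
        record.tags = ["worker"]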
@@ -175,17 +175,17 @@ class EndOfLogMarker:
 
 
 class BaseRemoteLogSender(AsyncEventLoopMixin):
-    REMOTE_LOGS = Counter(
+    _REMOTE_LOGS = Counter(
         name="successful_remote_logs",
         documentation="Successfully sent remote log count",
         labelnames=["log_level"],
     )
-    FAILED_REMOTE_LOGS = Counter(
+    _FAILED_REMOTE_LOGS = Counter(
         name="failed_remote_logs",
         documentation="Failed remote log count",
         labelnames=["log_level", "exception"],
     )
-    REMOTE_SENDER_STATE = PrometheusEnum(
+    _REMOTE_SENDER_STATE = PrometheusEnum(
         name="remote_sender_state",
         documentation="Remote sender state",
         states=["running", "stopped"],
@@ -212,7 +212,7 @@ class BaseRemoteLogSender(AsyncEventLoopMixin):
             Queue(failed_log_queue_size)
         )
         self._level = LogLevel.INFO
-        self._logger = LoggerProvider.default().get_logger(
+        self._logger = LoggerFactory.get_logger(
             f"remotelogging.{self.__class__.__name__.lower()}"
         )
 
@@ -235,7 +235,7 @@ class BaseRemoteLogSender(AsyncEventLoopMixin):
             "remote logger `%s` started accepting logs",
             self.__class__.__name__,
         )
-        self.REMOTE_SENDER_STATE.state("running")
+        self._REMOTE_SENDER_STATE.state("running")
 
     def _hook_on_start(self) -> None:
         pass
@@ -299,7 +299,7 @@ class BaseRemoteLogSender(AsyncEventLoopMixin):
     def _on_close_future_done(self, future: Future[None]) -> None:
         del future
 
-        self.REMOTE_SENDER_STATE.state("stopped")
+        self._REMOTE_SENDER_STATE.state("stopped")
 
     def _hook_on_stop(self) -> None:
         pass
@@ -348,7 +348,7 @@ class BaseRemoteLogSender(AsyncEventLoopMixin):
             self._logger.exception(
                 "error occurred while sending log to remote logging facility"
             )
-            self.FAILED_REMOTE_LOGS.labels(
+            self._FAILED_REMOTE_LOGS.labels(
                 log_level=log.log_level.name, exception=e.__class__.__name__
             ).inc()
 
@@ -371,7 +371,7 @@ class BaseRemoteLogSender(AsyncEventLoopMixin):
                 sender_response.reason or "No reason",
             )
         else:
-            self.REMOTE_LOGS.labels(log_level=log.log_level.name).inc()
+            self._REMOTE_LOGS.labels(log_level=log.log_level.name).inc()
             self._logger.debug(
                 "log sent to remote logging facility.\n%r", log
             )
@@ -546,7 +546,7 @@ class BaseRemoteLogSender(AsyncEventLoopMixin):
             isinstance(k, str) and isinstance(v, str)
             for (k, v) in extra.items()
         ):
-            log.labels = extra
+            log.extra = extra
 
         if exception:
             log.error_from_exception(exception)
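
The sender's public API is unchanged by the rename: extra was already the keyword argument, and the guard above still attaches it only when every key and value is a str. Only the record attribute it populates moved from labels to extra. A sketch of a call, mirroring the sites added in scheduler.py below (remote_logger is any BaseRemoteLogSender implementation):

    remote_logger.error(
        message="task failed",
        tags=["worker"],
        extra={"serviceType": "worker"},  # all-str mapping, attached to the record
        exception=err,                    # cause chain merged into the same dict
    )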
qena_shared_lib/remotelogging/logstash/_base.py CHANGED
@@ -15,8 +15,8 @@ class BaseLogstashSender(BaseRemoteLogSender):
         if log.tags is not None:
             log_dict["tags"] = log.tags
 
-        if log.labels is not None:
-            log_dict["labels"] = log.labels
+        if log.extra is not None:
+            log_dict["labels"] = log.extra
 
         error_type, error_message, error_stack_trace = log.error
 
qena_shared_lib/remotelogging/logstash/_http_sender.py CHANGED
@@ -1,6 +1,6 @@
 from httpx import AsyncClient, Timeout
 
-from ...logging import LoggerProvider
+from ...logging import LoggerFactory
 from .._base import RemoteLogRecord, SenderResponse
 from ._base import BaseLogstashSender
 
@@ -39,9 +39,7 @@ class HTTPSender(BaseLogstashSender):
         self._client = AsyncClient(
             auth=auth, timeout=http_client_timeout or 5.0
         )
-        self._logger = LoggerProvider.default().get_logger(
-            "logstash.httpsender"
-        )
+        self._logger = LoggerFactory.get_logger("logstash.httpsender")
 
     async def _send(self, log: RemoteLogRecord) -> SenderResponse:
         send_log_response = await self._client.post(
qena_shared_lib/remotelogging/logstash/_tcp_sender.py CHANGED
@@ -3,7 +3,7 @@ from typing import Any
 
 from pydantic_core import to_json
 
-from ...logging import LoggerProvider
+from ...logging import LoggerFactory
 from .._base import RemoteLogRecord, SenderResponse
 from ._base import BaseLogstashSender
 
@@ -28,7 +28,7 @@ class TCPSender(BaseLogstashSender):
         )
 
         self._client = AsyncTcpClient(host=host, port=port)
-        self._logger = LoggerProvider.default().get_logger("logstash.tcpsender")
+        self._logger = LoggerFactory.get_logger("logstash.tcpsender")
 
     async def _send(self, log: RemoteLogRecord) -> SenderResponse:
         await self._client.write(self.remote_log_record_to_ecs(log))
qena_shared_lib/scheduler.py CHANGED
@@ -16,7 +16,7 @@ from prometheus_client import Enum as PrometheusEnum
 from punq import Container, Scope
 
 from .dependencies.miscellaneous import validate_annotation
-from .logging import LoggerProvider
+from .logging import LoggerFactory
 from .remotelogging import BaseRemoteLogSender
 from .utils import AsyncEventLoopMixin
 
@@ -35,7 +35,7 @@ SCHEDULED_TASK_ATTRIBUTE = "__scheduled_task__"
 
 @dataclass
 class ScheduledTask:
-    task: Callable[..., None]
+    task: Callable[..., Any]
     cron_expression: str
     zone_info: ZoneInfo | None
 
@@ -43,6 +43,7 @@
         self._next_run_in: int | None = None
         self._ran = False
         self._paramters = {}
+        self._is_async_task = iscoroutinefunction(self.task)
 
         for parameter_name, paramter in signature(self.task).parameters.items():
             dependency = validate_annotation(paramter.annotation)
@@ -54,6 +55,10 @@
 
             self._paramters[parameter_name] = dependency
 
+    @property
+    def is_async_task(self) -> bool:
+        return self._is_async_task
+
     @property
     def next_run_in(self) -> int | None:
         return self._next_run_in
@@ -120,7 +125,7 @@ def scheduler() -> Scheduler:
 
 
 @dataclass
-class ScheduledTaskMeta:
+class ScheduledTaskMetadata:
     cron_expression: str
     timezone: str | None = None
 
@@ -132,7 +137,7 @@ def schedule(
     setattr(
         task,
         SCHEDULED_TASK_ATTRIBUTE,
-        ScheduledTaskMeta(
+        ScheduledTaskMetadata(
            cron_expression=cron_expression, timezone=timezone
        ),
    )
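
ScheduledTaskMeta is renamed to ScheduledTaskMetadata throughout. The schedule(...) decorator only stamps this metadata onto the function under SCHEDULED_TASK_ATTRIBUTE; SchedulerBase (next hunk) scans for the stamped attributes and registers them. A hypothetical declaration, assuming schedule is a decorator factory as its setattr body implies:

    class ReportScheduler(SchedulerBase):
        @schedule(cron_expression="*/5 * * * *", timezone="UTC")
        async def purge_expired(self) -> None:
            ...  # picked up by the attribute scan in the next hunk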
@@ -162,29 +167,29 @@ class SchedulerBase:
            if attribute is None:
                continue

-            scheduled_task_meta = getattr(
+            scheduled_task_metadata = getattr(
                attribute, SCHEDULED_TASK_ATTRIBUTE, None
            )

-            if scheduled_task_meta is None:
+            if scheduled_task_metadata is None:
                continue

-            if not isinstance(scheduled_task_meta, ScheduledTaskMeta):
+            if not isinstance(scheduled_task_metadata, ScheduledTaskMetadata):
                raise TypeError(
-                    f"expected `{SCHEDULED_TASK_ATTRIBUTE}` to by of type `ScheduledTaskMeta`, got {type(scheduled_task_meta)}"
+                    f"expected `{SCHEDULED_TASK_ATTRIBUTE}` to by of type `ScheduledTaskMetadata`, got {type(scheduled_task_metadata)}"
                )

            scheduler.add_task(
                task=attribute,
-                cron_expression=scheduled_task_meta.cron_expression,
-                timezone=scheduled_task_meta.timezone,
+                cron_expression=scheduled_task_metadata.cron_expression,
+                timezone=scheduled_task_metadata.timezone,
            )

        return scheduler


 class ScheduleManager(AsyncEventLoopMixin):
-    SCHEDULE_MANAGER_STATE = PrometheusEnum(
+    _SCHEDULE_MANAGER_STATE = PrometheusEnum(
        name="schedule_manager_state",
        documentation="Schedule manager state",
        states=["running", "stopped"],
@@ -202,7 +207,7 @@ class ScheduleManager(AsyncEventLoopMixin):
        self._scheduler_task: Task[None] | None = None
        self._scheduled_tasks_or_futures: list[Task[Any] | Future[Any]] = []
        self._stopped = False
-        self._logger = LoggerProvider.default().get_logger("schedule_manager")
+        self._logger = LoggerFactory.get_logger("schedule_manager")

    def include_scheduler(
        self, scheduler: Scheduler | type[SchedulerBase]
@@ -259,7 +264,7 @@
        self._scheduler_task = self.loop.create_task(self._run_scheduler())

        self._scheduler_task.add_done_callback(self._on_scheduler_done)
-        self.SCHEDULE_MANAGER_STATE.state("running")
+        self._SCHEDULE_MANAGER_STATE.state("running")

    def use_schedulers(self) -> None:
        for scheduler in [
@@ -278,7 +283,7 @@
        if self._scheduler_task is not None and not self._scheduler_task.done():
            self._scheduler_task.cancel()

-        self.SCHEDULE_MANAGER_STATE.state("stopped")
+        self._SCHEDULE_MANAGER_STATE.state("stopped")

    def _on_scheduler_done(self, task: Task[None]) -> None:
        if task.cancelled():
@@ -289,6 +294,8 @@
        if exception is not None:
            self._remote_logger.error(
                message="error occured in schedule manager",
+                tags=["schedule_manager", "stop_schedule_manager_error"],
+                extra={"serviceType": "schedule_manager"},
                exception=exception,
            )

@@ -347,7 +354,7 @@
        args = self._resolve_dependencies(scheduled_task)
        scheduled_task_or_future: Task[Any] | Future[Any] | None = None

-        if iscoroutinefunction(scheduled_task.task):
+        if scheduled_task.is_async_task:
            scheduled_task_or_future = self.loop.create_task(
                scheduled_task.task(**args)
            )
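
ScheduledTask now computes iscoroutinefunction once at construction, and the manager branches on the cached is_async_task flag instead of re-inspecting the callable on every run. The dispatch shape in isolation (the executor branch for sync tasks is an assumption; the diff only shows the async side):

    import asyncio
    from inspect import iscoroutinefunction

    def dispatch(task, loop: asyncio.AbstractEventLoop):
        if iscoroutinefunction(task):            # cached as is_async_task in 0.1.18
            return loop.create_task(task())      # coroutine: schedule on the loop
        return loop.run_in_executor(None, task)  # plain callable: assumed executor path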
@@ -393,6 +400,8 @@
        if exception is not None:
            self._remote_logger.error(
                message="error occured while executing task",
+                tags=["schedule_manager", "scheduled_task_done_error"],
+                extra={"serviceType": "schedule_manager"},
                exception=exception,
            )

qena_shared_lib/security.py CHANGED
@@ -173,44 +173,51 @@ class EndpointAclValidator:
             permission_match_strategy or PermissionMatch.SOME
         )
 
+        if self._permissions is not None:
+            self._permissions = sorted(self._permissions)
+
     def __call__(
         self, user_info: Annotated[UserInfo, Depends(extract_user_info)]
     ) -> UserInfo:
-        if (
-            self._user_type is not None
-            and (
-                user_info.user_type is None
-                or user_info.user_type != self._user_type
-            )
-        ) or (
-            self._permissions is not None
-            and (
-                user_info.user_permissions is None
-                or not self._permissions_match(user_info.user_permissions)
-            )
+        if self._user_type_match(user_info) and self._permissions_match(
+            user_info
         ):
-            raise Unauthorized(
-                message=MESSAGE,
-                response_code=RESPONSE_CODE,
-                tags=[user_info.user_id],
-                extra={
-                    "userId": user_info.user_id,
-                    "userType": user_info.user_type,
-                    "userPermissions": str(user_info.user_permissions or []),
-                    "requiredUserType": self._user_type or "None",
-                    "requiredPermissions": str(self._permissions or []),
-                    "permissionMatchStrategy": self._permission_match_strategy.name,
-                },
-            )
-
-        return user_info
-
-    def _permissions_match(self, user_permissions: list[str]) -> bool:
-        assert self._permissions is not None
+            return user_info
+
+        raise Unauthorized(
+            message=MESSAGE,
+            response_code=RESPONSE_CODE,
+            tags=[user_info.user_id],
+            extra={
+                "userId": user_info.user_id,
+                "userType": user_info.user_type,
+                "userPermissions": str(user_info.user_permissions or []),
+                "requiredUserType": self._user_type or "None",
+                "requiredPermissions": str(self._permissions or []),
+                "permissionMatchStrategy": self._permission_match_strategy.name,
+            },
+        )
+
+    def _user_type_match(self, user_info: UserInfo) -> bool:
+        if self._user_type is None:
+            return True
+
+        if user_info.user_type is None:
+            return False
+
+        return user_info.user_type == self._user_type
+
+    def _permissions_match(self, user_info: UserInfo) -> bool:
+        if self._permissions is None:
+            return True
+
+        if user_info.user_permissions is None:
+            return False
 
         if self._permission_match_strategy == PermissionMatch.ALL:
-            return sorted(self._permissions) == sorted(user_permissions)
+            return self._permissions == sorted(user_info.user_permissions)
 
         return any(
-            permission in self._permissions for permission in user_permissions
+            permission in self._permissions
+            for permission in user_info.user_permissions
         )
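
The validator refactor splits one compound condition into _user_type_match and _permissions_match, each treating an unconfigured requirement as an automatic pass, and sorts self._permissions once in __init__ rather than on every request. Matching semantics are unchanged: ALL is exact equality of the sorted permission lists, SOME is any overlap. A standalone sketch:

    required = sorted(["orders:read", "orders:write"])

    def matches_all(user_permissions: list[str]) -> bool:
        return required == sorted(user_permissions)  # exact match, extras fail

    def matches_some(user_permissions: list[str]) -> bool:
        return any(p in required for p in user_permissions)

    assert matches_all(["orders:write", "orders:read"])
    assert not matches_all(["orders:read", "orders:write", "admin"])
    assert matches_some(["orders:read", "billing:read"])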
qena_shared_lib/utils.py CHANGED
@@ -1,4 +1,5 @@
 from asyncio import AbstractEventLoop, get_running_loop
+from types import UnionType
 from typing import Generator
 
 from pydantic import TypeAdapter
@@ -11,28 +12,29 @@ class AsyncEventLoopMixin:
 
     @property
     def loop(self) -> AbstractEventLoop:
-        if self._LOOP is None:
-            self._LOOP = get_running_loop()
+        if AsyncEventLoopMixin._LOOP is None:
+            AsyncEventLoopMixin._LOOP = get_running_loop()
 
-        return self._LOOP
+        return AsyncEventLoopMixin._LOOP
 
-    def init(self) -> None:
-        self._LOOP = get_running_loop()
+    @staticmethod
+    def reset_running_loop() -> None:
+        AsyncEventLoopMixin._LOOP = get_running_loop()
 
 
 class TypeAdapterCache:
-    _cache: dict[type, TypeAdapter] = {}
+    _CACHE: dict[type | UnionType, TypeAdapter] = {}
 
     @classmethod
-    def cache_annotation(cls, annotation: type) -> None:
-        if annotation not in cls._cache:
-            cls._cache[annotation] = TypeAdapter(annotation)
+    def cache_annotation(cls, annotation: type | UnionType) -> None:
+        if annotation not in cls._CACHE:
+            cls._CACHE[annotation] = TypeAdapter(annotation)
 
     @classmethod
-    def get_type_adapter(cls, annotation: type) -> TypeAdapter:
+    def get_type_adapter(cls, annotation: type | UnionType) -> TypeAdapter:
         cls.cache_annotation(annotation)
 
-        return cls._cache[annotation]
+        return cls._CACHE[annotation]
 
 
 class YieldOnce:
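
Two things change in utils.py. AsyncEventLoopMixin now assigns the cached loop on the class explicitly (the old self._LOOP assignment created an instance attribute that shadowed the class-level cache, so each instance captured its own loop), and init() becomes the static reset_running_loop(). TypeAdapterCache widens its key type to type | UnionType, which is what lets RpcClient.return_type accept unions above. A quick sketch:

    from qena_shared_lib.utils import TypeAdapterCache

    adapter = TypeAdapterCache.get_type_adapter(int | str)  # cacheable in 0.1.18
    assert adapter.validate_python(5) == 5
    assert TypeAdapterCache.get_type_adapter(int | str) is adapter  # cache hit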
{qena_shared_lib-0.1.16.dist-info → qena_shared_lib-0.1.18.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: qena-shared-lib
-Version: 0.1.16
+Version: 0.1.18
 Summary: A shared tools for other services
 Requires-Python: >=3.10
 Requires-Dist: fastapi[all]==0.115.6
@@ -11,6 +11,14 @@ Requires-Dist: pydantic-core==2.27.1
 Requires-Dist: pydantic==2.10.3
 Requires-Dist: starlette==0.41.3
 Requires-Dist: typing-extensions==4.12.2
+Provides-Extra: all
+Requires-Dist: aiokafka==0.12.0; extra == 'all'
+Requires-Dist: cronsim==2.6; extra == 'all'
+Requires-Dist: jwt==1.3.1; extra == 'all'
+Requires-Dist: passlib[bcrypt]==1.7.4; extra == 'all'
+Requires-Dist: pika==1.3.2; extra == 'all'
+Provides-Extra: kafka
+Requires-Dist: aiokafka==0.12.0; extra == 'kafka'
 Provides-Extra: rabbitmq
 Requires-Dist: pika==1.3.2; extra == 'rabbitmq'
 Provides-Extra: scheduler
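
The new extras track the kafka subpackage introduced in this release: pip install "qena-shared-lib[kafka]" pulls in aiokafka 0.12.0, while the all extra bundles every optional dependency (aiokafka, cronsim, jwt, passlib[bcrypt], pika).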