qena-shared-lib 0.1.17__py3-none-any.whl → 0.1.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. qena_shared_lib/__init__.py +20 -2
  2. qena_shared_lib/alias.py +27 -0
  3. qena_shared_lib/application.py +4 -4
  4. qena_shared_lib/background.py +9 -7
  5. qena_shared_lib/cache.py +61 -0
  6. qena_shared_lib/enums.py +8 -0
  7. qena_shared_lib/eventbus.py +373 -0
  8. qena_shared_lib/exception_handling.py +409 -0
  9. qena_shared_lib/exceptions.py +167 -57
  10. qena_shared_lib/http/__init__.py +110 -0
  11. qena_shared_lib/{http.py → http/_base.py} +36 -36
  12. qena_shared_lib/http/_exception_handlers.py +202 -0
  13. qena_shared_lib/http/_request.py +24 -0
  14. qena_shared_lib/http/_response.py +24 -0
  15. qena_shared_lib/kafka/__init__.py +21 -0
  16. qena_shared_lib/kafka/_base.py +233 -0
  17. qena_shared_lib/kafka/_consumer.py +597 -0
  18. qena_shared_lib/kafka/_exception_handlers.py +124 -0
  19. qena_shared_lib/kafka/_producer.py +133 -0
  20. qena_shared_lib/logging.py +17 -13
  21. qena_shared_lib/mongodb.py +575 -0
  22. qena_shared_lib/rabbitmq/__init__.py +6 -6
  23. qena_shared_lib/rabbitmq/_base.py +68 -132
  24. qena_shared_lib/rabbitmq/_channel.py +2 -4
  25. qena_shared_lib/rabbitmq/_exception_handlers.py +69 -142
  26. qena_shared_lib/rabbitmq/_listener.py +245 -180
  27. qena_shared_lib/rabbitmq/_publisher.py +5 -5
  28. qena_shared_lib/rabbitmq/_rpc_client.py +21 -22
  29. qena_shared_lib/rabbitmq/message/__init__.py +19 -0
  30. qena_shared_lib/rabbitmq/message/_inbound.py +13 -0
  31. qena_shared_lib/rabbitmq/message/_outbound.py +13 -0
  32. qena_shared_lib/redis.py +47 -0
  33. qena_shared_lib/remotelogging/_base.py +34 -28
  34. qena_shared_lib/remotelogging/logstash/_base.py +3 -2
  35. qena_shared_lib/remotelogging/logstash/_http_sender.py +2 -4
  36. qena_shared_lib/remotelogging/logstash/_tcp_sender.py +2 -2
  37. qena_shared_lib/scheduler.py +24 -15
  38. qena_shared_lib/security.py +39 -32
  39. qena_shared_lib/sync.py +91 -0
  40. qena_shared_lib/utils.py +13 -11
  41. {qena_shared_lib-0.1.17.dist-info → qena_shared_lib-0.1.19.dist-info}/METADATA +395 -32
  42. qena_shared_lib-0.1.19.dist-info/RECORD +50 -0
  43. qena_shared_lib-0.1.19.dist-info/WHEEL +4 -0
  44. qena_shared_lib/exception_handlers.py +0 -235
  45. qena_shared_lib-0.1.17.dist-info/RECORD +0 -31
  46. qena_shared_lib-0.1.17.dist-info/WHEEL +0 -4
@@ -0,0 +1,575 @@
1
+ from contextlib import asynccontextmanager
2
+ from datetime import datetime
3
+ from typing import (
4
+ Annotated,
5
+ Any,
6
+ AsyncGenerator,
7
+ Generic,
8
+ TypeAlias,
9
+ TypeVar,
10
+ cast,
11
+ get_args,
12
+ overload,
13
+ )
14
+
15
+ from bson.objectid import ObjectId
16
+ from pydantic import BeforeValidator, Field, field_serializer
17
+ from pymongo import AsyncMongoClient, IndexModel
18
+ from pymongo.asynchronous.client_session import AsyncClientSession
19
+ from pymongo.asynchronous.collection import AsyncCollection
20
+ from pymongo.asynchronous.database import AsyncDatabase
21
+ from typing_extensions import Self
22
+
23
+ from .alias import CamelCaseAliasedBaseModel
24
+ from .logging import LoggerFactory
25
+
26
+ __all__ = [
27
+ "AggregatedDocument",
28
+ "Document",
29
+ "EmbeddedDocument",
30
+ "IndexManager",
31
+ "IndexModel",
32
+ "MongoDBManager",
33
+ "MongoDBObjectId",
34
+ "ProjectedDocument",
35
+ "RepositoryBase",
36
+ "validate_object_id",
37
+ ]
38
+
39
+
40
class MongoDBManager:
    """Owns the async MongoDB client and the default database handle.

    Provides connection lifecycle helpers, a transactional-session
    context manager, and collection lookup by ``Document`` class.
    """

    def __init__(self, connection_string: str, db: str | None = None):
        # db=None lets the driver fall back to the database named in the
        # connection string.
        self._client = AsyncMongoClient(connection_string)
        self._db = self._client.get_database(db)
        self._logger = LoggerFactory.get_logger("mongodb_manager")

    async def connect(self) -> None:
        """Open the connection and log the server address."""
        await self._client.aconnect()

        # Conventional defaults when the driver cannot report an
        # address yet.
        host, port = "localhost", 27017
        address = await self._client.address

        if address is not None:
            host, port = address

        self._logger.info("connected to mongodb server `%s:%s`", host, port)

    async def disconnect(self) -> None:
        """Close the underlying client."""
        await self._client.aclose()
        self._logger.info("disconnected from mongodb")

    @property
    def client(self) -> AsyncMongoClient:
        return self._client

    @property
    def db(self) -> AsyncDatabase:
        return self._db

    @asynccontextmanager
    async def transactional(self) -> AsyncGenerator[AsyncClientSession, None]:
        """Yield a session with an open transaction.

        The transaction commits or aborts when the `async with` block
        exits; the session itself is closed afterwards.
        """
        async with self.client.start_session() as session:
            async with await session.start_transaction():
                yield session

    def __getitem__(self, document: type["Document"]) -> AsyncCollection:
        """Resolve the collection backing the given document class."""
        return self._db.get_collection(document.get_collection_name())
78
+
79
+
80
class TimeStampMixin(CamelCaseAliasedBaseModel):
    """Mixin adding audit timestamps to a model.

    ``updated_at`` is rewritten to the current time on every
    serialization, so a persisted document always carries the time it
    was last written.
    """

    created_at: datetime = Field(default_factory=datetime.now)
    updated_at: datetime = Field(default_factory=datetime.now)

    @field_serializer("updated_at", when_used="always")
    def serialize_updated_at(self, _: datetime) -> datetime:
        # The stored value is ignored on purpose: serialization time is
        # the update time.
        return datetime.now()
87
+
88
+
89
def validate_object_id(value: Any) -> ObjectId:
    """Coerce *value* into an ``ObjectId``.

    Raises:
        ValueError: when *value* is not a valid ObjectId representation.
    """
    if ObjectId.is_valid(value):
        return ObjectId(value)

    raise ValueError(f"{value} is not valid objectid")


# Pydantic-aware ObjectId: runs validate_object_id before standard
# validation, so string ids coming from the wire become ObjectId.
MongoDBObjectId: TypeAlias = Annotated[
    ObjectId, BeforeValidator(validate_object_id)
]
99
+
100
+
101
class Document(CamelCaseAliasedBaseModel):
    """Base class for top-level persisted documents.

    Subclasses may declare ``__collection_name__`` and ``__indexes__``
    class attributes to control the backing collection and its indexes.
    """

    # Mapped to MongoDB's `_id`; a fresh ObjectId is generated when the
    # caller supplies none.
    id: MongoDBObjectId = Field(alias="_id", default_factory=ObjectId)

    @classmethod
    def get_collection_name(cls) -> str:
        """Return ``__collection_name__``, defaulting to the class name."""
        name = getattr(cls, "__collection_name__", None)

        return cls.__name__ if name is None else name

    @classmethod
    def get_indexes(cls) -> list[IndexModel] | None:
        """Return the declared ``IndexModel`` list, or None if undeclared."""
        return getattr(cls, "__indexes__", None)

    @classmethod
    def from_raw_document(cls, document: Any, **kwargs: Any) -> Self:
        """Validate a raw driver document into this model type."""
        validated = cls.model_validate(document, **kwargs)

        return cast(Self, validated)
120
+
121
+
122
class EmbeddedDocument(CamelCaseAliasedBaseModel):
    """Base class for documents nested inside another document."""

    @classmethod
    def from_raw_embedded_document(
        cls, embedded_document: Any, **kwargs: Any
    ) -> Self:
        """Validate a raw nested mapping into this model type."""
        validated = cls.model_validate(embedded_document, **kwargs)

        return cast(Self, validated)
128
+
129
+
130
class ProjectedDocument(CamelCaseAliasedBaseModel):
    """Base class for partial (projected) reads of a document.

    The projection is either declared explicitly via ``__projection__``
    or derived once from the model's field aliases and cached.
    """

    @classmethod
    def from_raw_projected_document(
        cls, projected_document: Any, **kwargs: Any
    ) -> Self:
        """Validate a raw projected mapping into this model type."""
        return cast(Self, cls.model_validate(projected_document, **kwargs))

    @classmethod
    def get_projection(cls) -> list[str] | dict[str, Any]:
        """Return the MongoDB projection for this model."""
        declared = getattr(cls, "__projection__", None)

        if declared is not None:
            return cast(list[str] | dict[str, Any], declared)

        return cls._projection_from_field_info()

    @classmethod
    def _projection_from_field_info(cls) -> list[str]:
        # Derive the projection from field aliases (falling back to the
        # field name) and cache it on the class for subsequent calls.
        cls.__projection__ = [
            info.alias or name
            for name, info in cls.model_fields.items()
        ]

        return cls.__projection__
154
+
155
+
156
class AggregatedDocument(CamelCaseAliasedBaseModel):
    """Base class for results of an aggregation pipeline.

    Subclasses must declare the pipeline stages in ``__pipeline__``.
    """

    @classmethod
    def from_raw_aggregated_document(cls, obj: Any, **kwargs: Any) -> Self:
        """Validate a raw aggregation result into this model type."""
        return cast(Self, cls.model_validate(obj, **kwargs))

    @classmethod
    def get_pipeline(cls) -> list[Any]:
        """Return ``__pipeline__``.

        Raises:
            ValueError: when the subclass never declared a pipeline.
        """
        pipeline = getattr(cls, "__pipeline__", None)

        if pipeline is None:
            raise ValueError(
                f"__pipeline__ is not defined for aggregated document {cls.__name__}"
            )

        return cast(list[Any], pipeline)
171
+
172
+
173
+ class IndexManager:
174
+ def __init__(
175
+ self, db: MongoDBManager, documents: list[type[Document]]
176
+ ) -> None:
177
+ self._db = db
178
+ self._documents = documents
179
+
180
+ async def create_indexes(self) -> None:
181
+ for document in self._documents:
182
+ indexes = document.get_indexes()
183
+
184
+ if indexes is None:
185
+ continue
186
+
187
+ await self._db[document].create_indexes(indexes)
188
+
189
+ async def get_indexes(self, collection_name: str) -> list[str]:
190
+ document = self._get_document(collection_name)
191
+ indexes = []
192
+
193
+ async with await self._db[document].list_indexes() as cursor:
194
+ async for index in cursor:
195
+ indexes.append(index.get("name"))
196
+
197
+ return indexes
198
+
199
+ async def drop_indexes(
200
+ self, collection_names: list[str] | None = None
201
+ ) -> None:
202
+ for document in self._documents:
203
+ if (
204
+ collection_names is not None
205
+ and document.get_collection_name() not in collection_names
206
+ ):
207
+ continue
208
+
209
+ await self._db[document].drop_indexes()
210
+
211
+ async def drop_index(self, collection_name: str, index_name: str) -> None:
212
+ document = self._get_document(collection_name)
213
+
214
+ await self._db[document].drop_index(index_name)
215
+
216
+ def _get_document(self, collection_name: str) -> type[Document]:
217
+ document = None
218
+
219
+ for document in self._documents:
220
+ if document.get_collection_name() == collection_name:
221
+ break
222
+
223
+ if document is None:
224
+ raise ValueError(
225
+ f"collection with name {collection_name} not found"
226
+ )
227
+
228
+ return document
229
+
230
+
231
# Type parameters used by RepositoryBase and its query helpers.
T = TypeVar("T", bound=Document)  # the repository's document type
P = TypeVar("P", bound=ProjectedDocument)  # projected read model
A = TypeVar("A", bound=AggregatedDocument)  # aggregation result model
S = TypeVar("S")  # unconstrained type variable
235
+
236
+
237
class RepositoryBase(Generic[T]):
    """Generic async repository over a single ``Document`` subclass.

    The concrete document type is read from the subclass' generic
    parameter, e.g. ``class UserRepository(RepositoryBase[User])``.
    A session passed at construction time is used for every operation
    unless a per-call ``session`` argument overrides it.
    """

    def __init__(
        self, db: MongoDBManager, session: AsyncClientSession | None = None
    ) -> None:
        self._db = db
        self._session = session
        # Resolved lazily from __orig_bases__ on first access.
        self._document_type = None

    @property
    def db(self) -> MongoDBManager:
        return self._db

    @property
    def collection(self) -> AsyncCollection:
        """The collection backing this repository's document type."""
        return self._db[self.document_type]

    @property
    def session(self) -> AsyncClientSession | None:
        return self._session

    @property
    def document_type(self) -> type[T]:
        """Concrete class bound to the generic parameter ``T``.

        Raises:
            RuntimeError: when the subclass did not parametrize
                ``RepositoryBase``.
        """
        if self._document_type is None:
            orig_bases = getattr(self, "__orig_bases__", None)

            if not orig_bases:
                raise RuntimeError("generic variable T is not specified")

            # The last generic base carries the parametrization; its last
            # type argument is the document class.
            *_, orig_class = orig_bases
            *_, self._document_type = get_args(orig_class)

        return cast(type[T], self._document_type)

    async def insert(
        self, document: T, session: AsyncClientSession | None = None
    ) -> ObjectId | str:
        """Insert one document and return its id."""
        inserted_one_result = await self.collection.insert_one(
            document=document.model_dump(by_alias=True),
            session=session or self.session,
        )

        return inserted_one_result.inserted_id

    async def insert_many(
        self, documents: list[T], session: AsyncClientSession | None = None
    ) -> list[ObjectId] | list[str]:
        """Insert several documents and return their ids in order."""
        insert_many_result = await self.collection.insert_many(
            documents=[
                document.model_dump(by_alias=True) for document in documents
            ],
            session=session or self.session,
        )

        return cast(list[ObjectId] | list[str], insert_many_result.inserted_ids)

    @overload
    async def find_by_id(
        self,
        *,
        id: Any,
        skip: int = 0,
        sort: dict[str, int] | None = None,
        session: AsyncClientSession | None = None,
    ) -> T | None:
        pass

    @overload
    async def find_by_id(
        self,
        *,
        id: Any,
        projection: type[P],
        skip: int = 0,
        sort: dict[str, int] | None = None,
        session: AsyncClientSession | None = None,
    ) -> P | None:
        pass

    async def find_by_id(self, *_: Any, **kwargs: Any) -> Any:
        """Find one document by ``_id``; None when nothing matches."""
        return await self._find_one(
            filter={"_id": kwargs["id"]},
            projection=kwargs.get("projection"),
            skip=kwargs.get("skip", 0),
            sort=kwargs.get("sort"),
            session=kwargs.get("session"),
        )

    @overload
    async def find_by_filter(
        self,
        *,
        filter: dict[str, Any],
        skip: int = 0,
        sort: dict[str, int] | None = None,
        session: AsyncClientSession | None = None,
    ) -> T | None:
        pass

    @overload
    async def find_by_filter(
        self,
        *,
        filter: dict[str, Any],
        projection: type[P],
        skip: int = 0,
        sort: dict[str, int] | None = None,
        session: AsyncClientSession | None = None,
    ) -> P | None:
        pass

    async def find_by_filter(self, *_: Any, **kwargs: Any) -> Any:
        """Find one document matching ``filter``; None when nothing matches."""
        return await self._find_one(
            filter=kwargs["filter"],
            projection=kwargs.get("projection"),
            skip=kwargs.get("skip", 0),
            sort=kwargs.get("sort"),
            session=kwargs.get("session"),
        )

    async def _find_one(
        self,
        filter: dict[str, Any],
        projection: type[P] | None = None,
        skip: int = 0,
        sort: dict[str, int] | None = None,
        session: AsyncClientSession | None = None,
    ) -> T | P | None:
        """Shared find-one implementation behind the public overloads.

        Returns None when no document matches. (Previously the driver's
        None result was passed straight into model validation, which
        raised a validation error instead of signalling "not found" as
        the ``-> T | None`` overloads promise.)
        """
        raw_projection = (
            projection.get_projection() if projection is not None else None
        )
        raw_document = await self.collection.find_one(
            filter=filter,
            projection=raw_projection,
            skip=skip,
            sort=sort,
            session=session or self.session,
        )

        if raw_document is None:
            return None

        if projection is not None:
            return projection.from_raw_projected_document(raw_document)

        return self.document_type.from_raw_document(raw_document)

    async def replace(
        self, replacement: T, session: AsyncClientSession | None = None
    ) -> None:
        """Replace the stored document that shares ``replacement``'s id."""
        await self.collection.replace_one(
            filter={"_id": replacement.id},
            replacement=replacement.model_dump(by_alias=True),
            session=session or self.session,
        )

    @overload
    def find_all(
        self,
        *,
        skip: int = 0,
        limit: int = 0,
        sort: dict[str, int] | None = None,
        session: AsyncClientSession | None = None,
    ) -> AsyncGenerator[T, None]:
        pass

    @overload
    def find_all(
        self,
        *,
        projection: type[P],
        skip: int = 0,
        limit: int = 0,
        sort: dict[str, int] | None = None,
        session: AsyncClientSession | None = None,
    ) -> AsyncGenerator[P, None]:
        pass

    async def find_all(
        self, *_: Any, **kwargs: Any
    ) -> AsyncGenerator[Any, None]:
        """Stream every document, optionally projected/paged/sorted."""
        async for document in self._find(
            projection=kwargs.get("projection"),
            skip=kwargs.get("skip", 0),
            limit=kwargs.get("limit", 0),
            sort=kwargs.get("sort"),
            session=kwargs.get("session"),
        ):
            yield document

    @overload
    def find_all_by_filter(
        self,
        *,
        filter: dict[str, Any],
        skip: int = 0,
        limit: int = 0,
        sort: dict[str, int] | None = None,
        session: AsyncClientSession | None = None,
    ) -> AsyncGenerator[T, None]:
        pass

    # Annotation fixed: a projected query yields P (the projection
    # model), not T, mirroring the find_all overloads.
    @overload
    def find_all_by_filter(
        self,
        *,
        filter: dict[str, Any],
        projection: type[P],
        skip: int = 0,
        limit: int = 0,
        sort: dict[str, int] | None = None,
        session: AsyncClientSession | None = None,
    ) -> AsyncGenerator[P, None]:
        pass

    async def find_all_by_filter(
        self, *_: Any, **kwargs: Any
    ) -> AsyncGenerator[Any, None]:
        """Stream documents matching ``filter``."""
        async for document in self._find(
            filter=kwargs.get("filter"),
            projection=kwargs.get("projection"),
            skip=kwargs.get("skip", 0),
            limit=kwargs.get("limit", 0),
            sort=kwargs.get("sort"),
            session=kwargs.get("session"),
        ):
            yield document

    async def _find(
        self,
        filter: dict[str, Any] | None = None,
        projection: type[P] | None = None,
        skip: int = 0,
        limit: int = 0,
        sort: dict[str, int] | None = None,
        session: AsyncClientSession | None = None,
    ) -> AsyncGenerator[T | P, None]:
        """Shared streaming implementation behind the find_all helpers."""
        if projection:
            async with self.collection.find(
                filter=filter,
                projection=projection.get_projection(),
                skip=skip,
                limit=limit,
                sort=sort,
                session=session or self.session,
            ) as cursor:
                async for document in cursor:
                    yield projection.from_raw_projected_document(document)

            return

        async with self.collection.find(
            filter=filter,
            skip=skip,
            limit=limit,
            sort=sort,
            session=session or self.session,
        ) as cursor:
            async for document in cursor:
                yield self.document_type.from_raw_document(document)

    @overload
    async def exists(
        self,
        *,
        id: Any,
        session: AsyncClientSession | None = None,
    ) -> bool:
        pass

    @overload
    async def exists(
        self,
        *,
        filter: dict[str, Any],
        session: AsyncClientSession | None = None,
    ) -> bool:
        pass

    async def exists(self, *_: Any, **kwargs: Any) -> bool:
        """Return True when a document matches ``id`` or ``filter``.

        Only the ``_id`` field is fetched, keeping the check cheap.
        """
        id = kwargs.get("id")
        filter = kwargs.get("filter")

        if id is not None:
            filter = {"_id": id}

        return (
            await self.collection.find_one(
                filter=filter,
                projection={"_id": True},
                session=kwargs.get("session") or self.session,
            )
            is not None
        )

    async def count(
        self,
        filter: dict[str, Any] | None = None,
        skip: int | None = None,
        limit: int | None = None,
        session: AsyncClientSession | None = None,
    ) -> int:
        """Count documents.

        With no arguments the cheap ``estimated_document_count`` is
        used; any filter/skip/limit forces an exact ``count_documents``.
        """
        if filter is not None or skip is not None or limit is not None:
            options = {}

            if skip is not None:
                options["skip"] = skip

            if limit is not None and limit > 0:
                options["limit"] = limit

            return cast(
                int,
                await self.collection.count_documents(
                    filter=filter or {},
                    **options,
                    session=session or self.session,
                ),
            )

        return cast(int, await self.collection.estimated_document_count())

    async def aggregate(
        self,
        aggregation: type[A],
        let: dict[str, Any] | None = None,
        session: AsyncClientSession | None = None,
    ) -> AsyncGenerator[A, None]:
        """Run ``aggregation``'s pipeline and stream validated results."""
        async with await self.collection.aggregate(
            pipeline=aggregation.get_pipeline(),
            let=let,
            session=session or self.session,
        ) as cursor:
            async for document in cursor:
                yield aggregation.from_raw_aggregated_document(document)
@@ -1,10 +1,10 @@
1
+ from . import message
1
2
  from ._base import AbstractRabbitMQService, RabbitMqManager
2
3
  from ._channel import BaseChannel
3
4
  from ._exception_handlers import (
4
- AbstractRabbitMqExceptionHandler,
5
- GeneralMqExceptionHandler,
5
+ RabbitMqGeneralExceptionHandler,
6
6
  RabbitMqServiceExceptionHandler,
7
- ValidationErrorHandler,
7
+ RabbitMqValidationErrorHandler,
8
8
  )
9
9
  from ._listener import (
10
10
  CONSUMER_ATTRIBUTE,
@@ -28,7 +28,6 @@ from ._publisher import Publisher
28
28
  from ._rpc_client import RpcClient
29
29
 
30
30
  __all__ = [
31
- "AbstractRabbitMqExceptionHandler",
32
31
  "AbstractRabbitMQService",
33
32
  "BackoffRetryDelay",
34
33
  "BaseChannel",
@@ -39,18 +38,19 @@ __all__ = [
39
38
  "Consumer",
40
39
  "execute",
41
40
  "FixedRetryDelay",
42
- "GeneralMqExceptionHandler",
43
41
  "LISTENER_ATTRIBUTE",
44
42
  "ListenerBase",
45
43
  "ListenerContext",
44
+ "message",
46
45
  "Publisher",
46
+ "RabbitMqGeneralExceptionHandler",
47
47
  "RabbitMqManager",
48
48
  "RabbitMqServiceExceptionHandler",
49
+ "RabbitMqValidationErrorHandler",
49
50
  "RetryDelayJitter",
50
51
  "RetryPolicy",
51
52
  "RPC_WORKER_ATTRIBUTE",
52
53
  "rpc_worker",
53
54
  "RpcClient",
54
55
  "RpcWorker",
55
- "ValidationErrorHandler",
56
56
  ]