stackraise 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- stackraise/__init__.py +6 -0
- stackraise/ai/__init__.py +2 -0
- stackraise/ai/rpa.py +380 -0
- stackraise/ai/toolset.py +227 -0
- stackraise/app.py +23 -0
- stackraise/auth/__init__.py +2 -0
- stackraise/auth/model.py +24 -0
- stackraise/auth/service.py +240 -0
- stackraise/ctrl/__init__.py +4 -0
- stackraise/ctrl/change_stream.py +40 -0
- stackraise/ctrl/crud_controller.py +63 -0
- stackraise/ctrl/file_storage.py +68 -0
- stackraise/db/__init__.py +11 -0
- stackraise/db/adapter.py +60 -0
- stackraise/db/collection.py +292 -0
- stackraise/db/cursor.py +229 -0
- stackraise/db/document.py +282 -0
- stackraise/db/exceptions.py +9 -0
- stackraise/db/id.py +79 -0
- stackraise/db/index.py +84 -0
- stackraise/db/persistence.py +238 -0
- stackraise/db/pipeline.py +245 -0
- stackraise/db/protocols.py +141 -0
- stackraise/di.py +36 -0
- stackraise/event.py +150 -0
- stackraise/inflection.py +28 -0
- stackraise/io/__init__.py +3 -0
- stackraise/io/imap_client.py +400 -0
- stackraise/io/smtp_client.py +102 -0
- stackraise/logging.py +22 -0
- stackraise/model/__init__.py +11 -0
- stackraise/model/core.py +16 -0
- stackraise/model/dto.py +12 -0
- stackraise/model/email_message.py +88 -0
- stackraise/model/file.py +154 -0
- stackraise/model/name_email.py +45 -0
- stackraise/model/query_filters.py +231 -0
- stackraise/model/time_range.py +285 -0
- stackraise/model/validation.py +8 -0
- stackraise/templating/__init__.py +4 -0
- stackraise/templating/exceptions.py +23 -0
- stackraise/templating/image/__init__.py +2 -0
- stackraise/templating/image/model.py +51 -0
- stackraise/templating/image/processor.py +154 -0
- stackraise/templating/parser.py +156 -0
- stackraise/templating/pptx/__init__.py +3 -0
- stackraise/templating/pptx/pptx_engine.py +204 -0
- stackraise/templating/pptx/slide_renderer.py +181 -0
- stackraise/templating/tracer.py +57 -0
- stackraise-0.1.0.dist-info/METADATA +37 -0
- stackraise-0.1.0.dist-info/RECORD +52 -0
- stackraise-0.1.0.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,245 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Any, Mapping, Optional, Self
|
|
4
|
+
import stackraise.db as db
|
|
5
|
+
import stackraise.model as model
|
|
6
|
+
import stackraise.inflection as inflection
|
|
7
|
+
from .protocols import (
|
|
8
|
+
QueryLike,
|
|
9
|
+
ensure_mongo_query,
|
|
10
|
+
SortLike,
|
|
11
|
+
ensure_mongo_sort,
|
|
12
|
+
DocumentProtocol,
|
|
13
|
+
)
|
|
14
|
+
from .cursor import CursorMixin, CursorIterator
|
|
15
|
+
from .adapter import Adapter
|
|
16
|
+
|
|
17
|
+
_MISSING = object()
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class Pipeline[T: model.Dto](CursorMixin[T]):
|
|
21
|
+
def __init__(
|
|
22
|
+
self,
|
|
23
|
+
collection: db.Collection,
|
|
24
|
+
):
|
|
25
|
+
self._collection = collection
|
|
26
|
+
self._result_type = None
|
|
27
|
+
self._stages = []
|
|
28
|
+
|
|
29
|
+
def result_type[R](self, type: type[R]) -> Pipeline[R]:
|
|
30
|
+
"""Set the result type for the pipeline."""
|
|
31
|
+
self._result_type = type
|
|
32
|
+
return self
|
|
33
|
+
|
|
34
|
+
async def aiter(self):
|
|
35
|
+
adapter = (
|
|
36
|
+
Adapter(self._result_type)
|
|
37
|
+
if self._result_type
|
|
38
|
+
else self._collection.adapter
|
|
39
|
+
)
|
|
40
|
+
inner = await self._collection._aggregate(self._stages)
|
|
41
|
+
return CursorIterator(adapter, inner)
|
|
42
|
+
|
|
43
|
+
def stage(self, stage: Mapping[str, Any]) -> Pipeline:
|
|
44
|
+
"""Add a stage to the pipeline."""
|
|
45
|
+
self._stages.append(stage)
|
|
46
|
+
return self
|
|
47
|
+
|
|
48
|
+
def match(self, query: QueryLike) -> Self:
|
|
49
|
+
"""Add a $match stage to the pipeline."""
|
|
50
|
+
query = ensure_mongo_query(query)
|
|
51
|
+
if query:
|
|
52
|
+
self._stages.append({"$match": query})
|
|
53
|
+
return self
|
|
54
|
+
|
|
55
|
+
def unwind(
|
|
56
|
+
self,
|
|
57
|
+
field_path: str,
|
|
58
|
+
index_field: Optional[str] = None,
|
|
59
|
+
preserve: bool = False,
|
|
60
|
+
) -> Self:
|
|
61
|
+
"""Add an $unwind stage to the pipeline"""
|
|
62
|
+
|
|
63
|
+
if index_field is not None or preserve is not False:
|
|
64
|
+
params = {"path": f"${field_path}"}
|
|
65
|
+
|
|
66
|
+
if index_field is not None:
|
|
67
|
+
params["includeArrayIndex"] = index_field
|
|
68
|
+
|
|
69
|
+
if preserve is not False:
|
|
70
|
+
params["preserveNullAndEmptyArrays"] = True
|
|
71
|
+
else:
|
|
72
|
+
params = f"${field_path}"
|
|
73
|
+
|
|
74
|
+
self.stage({"$unwind": params})
|
|
75
|
+
|
|
76
|
+
return self
|
|
77
|
+
|
|
78
|
+
def sort(self, sort: SortLike) -> Self:
|
|
79
|
+
"""Add a $sort stage to the pipeline."""
|
|
80
|
+
sort = ensure_mongo_sort(sort)
|
|
81
|
+
if sort:
|
|
82
|
+
self.stage({"$sort": sort})
|
|
83
|
+
return self
|
|
84
|
+
|
|
85
|
+
def set(self, set_fields: dict[str, Any]):
|
|
86
|
+
"""Add a $set stage to the pipeline."""
|
|
87
|
+
if set_fields:
|
|
88
|
+
self.stage({"$set": set_fields})
|
|
89
|
+
return self
|
|
90
|
+
|
|
91
|
+
def unset(self, *unset_fields: list[str]):
|
|
92
|
+
"""Add a $unset stage to the pipeline"""
|
|
93
|
+
if unset_fields:
|
|
94
|
+
self.stage({"$unset": unset_fields})
|
|
95
|
+
return self
|
|
96
|
+
|
|
97
|
+
def facet(self, facet_pipelines: dict[str, list] = {}) -> Self:
|
|
98
|
+
if facet_pipelines:
|
|
99
|
+
self.stage({"$facet": facet_pipelines})
|
|
100
|
+
return self
|
|
101
|
+
|
|
102
|
+
def embed(
|
|
103
|
+
self,
|
|
104
|
+
from_: type[DocumentProtocol] | db.Collection[DocumentProtocol] | Pipeline[DocumentProtocol],
|
|
105
|
+
ref_field: Optional[str] = None,
|
|
106
|
+
foreign_field: str = "_id",
|
|
107
|
+
as_field: Optional[str] = None,
|
|
108
|
+
#pipeline: Optional[Pipeline] = None,
|
|
109
|
+
by_query: Optional[QueryLike] = _MISSING,
|
|
110
|
+
many: bool = False,
|
|
111
|
+
) -> Self:
|
|
112
|
+
"""
|
|
113
|
+
Embed an entity from another collection into the pipeline.
|
|
114
|
+
|
|
115
|
+
This will perform a $lookup and $unwind operations to embed (or nest) a single document
|
|
116
|
+
from another collection into the current pipeline. This produces the following
|
|
117
|
+
stages according to mongodb specification:
|
|
118
|
+
```
|
|
119
|
+
# lookup stage
|
|
120
|
+
{
|
|
121
|
+
"$lookup": {
|
|
122
|
+
"from": collection.name,
|
|
123
|
+
"localField": field or to_camelcase(entity_name),
|
|
124
|
+
"foreignField": foreign_field or "_id",
|
|
125
|
+
"as": as_field or field or to_camelcase(entity_name)
|
|
126
|
+
}
|
|
127
|
+
}
|
|
128
|
+
# unwind stage if many is False
|
|
129
|
+
{
|
|
130
|
+
"$unwind": f"${as_field or field or to_camelcase(entity_name)}"
|
|
131
|
+
}
|
|
132
|
+
```
|
|
133
|
+
"""
|
|
134
|
+
|
|
135
|
+
pipeline = []
|
|
136
|
+
if isinstance(from_, Pipeline):
|
|
137
|
+
collection = from_._collection
|
|
138
|
+
pipeline = from_._stages
|
|
139
|
+
elif isinstance(from_, type):
|
|
140
|
+
collection = from_.collection
|
|
141
|
+
else:
|
|
142
|
+
collection = from_
|
|
143
|
+
|
|
144
|
+
if ref_field is None:
|
|
145
|
+
ref_field = inflection.to_camelcase(collection.adapter.typename)
|
|
146
|
+
|
|
147
|
+
if as_field is None:
|
|
148
|
+
as_field = ref_field
|
|
149
|
+
|
|
150
|
+
if by_query is not _MISSING:
|
|
151
|
+
by_query = ensure_mongo_query(by_query)
|
|
152
|
+
|
|
153
|
+
self.stage(
|
|
154
|
+
{
|
|
155
|
+
"$lookup": {
|
|
156
|
+
"from": collection.name,
|
|
157
|
+
"as": as_field,
|
|
158
|
+
"pipeline": [{"$match": by_query}, *pipeline],
|
|
159
|
+
}
|
|
160
|
+
}
|
|
161
|
+
)
|
|
162
|
+
else:
|
|
163
|
+
|
|
164
|
+
self.stage(
|
|
165
|
+
{
|
|
166
|
+
"$lookup": {
|
|
167
|
+
"from": collection.name,
|
|
168
|
+
"localField": ref_field,
|
|
169
|
+
"foreignField": foreign_field,
|
|
170
|
+
"as": as_field,
|
|
171
|
+
"pipeline": pipeline,
|
|
172
|
+
}
|
|
173
|
+
}
|
|
174
|
+
)
|
|
175
|
+
|
|
176
|
+
if many is False:
|
|
177
|
+
self.unwind(as_field)
|
|
178
|
+
|
|
179
|
+
return self
|
|
180
|
+
|
|
181
|
+
def join(
|
|
182
|
+
self,
|
|
183
|
+
from_: (
|
|
184
|
+
type[DocumentProtocol]
|
|
185
|
+
| db.Collection[DocumentProtocol]
|
|
186
|
+
| Pipeline[DocumentProtocol]
|
|
187
|
+
),
|
|
188
|
+
local_field: str = "_id",
|
|
189
|
+
foreign_field: Optional[str] = None,
|
|
190
|
+
as_field: Optional[str] = None,
|
|
191
|
+
unwind: bool = False,
|
|
192
|
+
) -> Self:
|
|
193
|
+
"""
|
|
194
|
+
Join another collection into the pipeline.
|
|
195
|
+
This will perform a $lookup operation to join another collection into the current pipeline.
|
|
196
|
+
The resulting stages will look like this:
|
|
197
|
+
```
|
|
198
|
+
{
|
|
199
|
+
"$lookup": {
|
|
200
|
+
"from": collection.name,
|
|
201
|
+
"localField": local_field,
|
|
202
|
+
"foreignField": foreign_field or to_camelcase(entity_name),
|
|
203
|
+
"as": as_field or collection.name
|
|
204
|
+
}
|
|
205
|
+
}
|
|
206
|
+
```
|
|
207
|
+
If `unwind` is True, it will also add an `$unwind` stage
|
|
208
|
+
```
|
|
209
|
+
{
|
|
210
|
+
"$unwind": f"${as_field or collection.name}"
|
|
211
|
+
}
|
|
212
|
+
```
|
|
213
|
+
"""
|
|
214
|
+
pipeline = []
|
|
215
|
+
if isinstance(from_, Pipeline):
|
|
216
|
+
pipeline = from_._stages
|
|
217
|
+
collection = from_._collection
|
|
218
|
+
elif isinstance(from_, type):
|
|
219
|
+
collection = from_.collection
|
|
220
|
+
else:
|
|
221
|
+
collection = from_
|
|
222
|
+
|
|
223
|
+
if foreign_field is None:
|
|
224
|
+
foreign_field = inflection.to_camelcase(self._collection.adapter.typename)
|
|
225
|
+
|
|
226
|
+
if as_field is None:
|
|
227
|
+
as_field = inflection.to_camelcase(collection.name)
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
self.stage(
|
|
231
|
+
{
|
|
232
|
+
"$lookup": {
|
|
233
|
+
"from": collection.name,
|
|
234
|
+
"localField": local_field,
|
|
235
|
+
"foreignField": foreign_field,
|
|
236
|
+
"as": as_field,
|
|
237
|
+
"pipeline": pipeline,
|
|
238
|
+
}
|
|
239
|
+
}
|
|
240
|
+
)
|
|
241
|
+
|
|
242
|
+
if unwind:
|
|
243
|
+
self.unwind(as_field)
|
|
244
|
+
|
|
245
|
+
return self
|
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
#%%
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
from typing import Any, Literal, Mapping, Protocol, Self, ClassVar
|
|
5
|
+
from weakref import WeakSet
|
|
6
|
+
|
|
7
|
+
import stackraise.db as db
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class DocumentProtocol(Protocol):
    """Structural interface for persisted documents.

    Implementations expose an id, a reference handle, the class-level
    collection they live in, and two async lifecycle hooks.
    """

    #kind: ClassVar[str]
    # Primary key of the document.
    id: db.Id
    # Reference handle pointing at this document.
    ref: db.Document.Ref
    # Collection this document type is persisted in (class-level).
    collection: ClassVar[db.Collection[Self]]

    async def __prepare_for_storage__(self) -> None:
        """Check the document's integrity (hook run before storage)."""
        pass

    async def __handle_post_deletion__(self) -> None:
        """Perform any cleanup after the document is deleted."""
        pass
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
# A raw MongoDB query document.
type MongoQuery = Mapping[str, Any]
# Anything accepted where a query is expected: a raw mapping, or an object
# implementing QueryProtocol.
type QueryLike = QueryProtocol | Mapping[str, Any]

class QueryProtocol(Protocol):
    """Structural interface for objects convertible to a MongoDB query."""

    # NOTE(review): declared async here, but ensure_mongo_query() calls
    # to_mongo_query() synchronously — confirm which contract is intended.
    async def to_mongo_query(self) -> MongoQuery:
        """Convert the query to a MongoDB query."""
        pass
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def ensure_mongo_query(query: QueryLike) -> MongoQuery:
    """Coerce *query* into a plain MongoDB query mapping.

    Mappings pass through unchanged; any other object must expose a
    ``to_mongo_query()`` method whose (mapping) result is returned instead.
    """
    if isinstance(query, Mapping):
        return query

    converter = getattr(query, "to_mongo_query", None)
    if converter is None:
        raise TypeError(f"Expected a Mapping or QueryProtocol, got {type(query)}")

    query = converter()
    assert isinstance(query, Mapping), f"Expected a Mapping, got {type(query)}"
    return query
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def merge_queries(*queries: QueryLike) -> MongoQuery:
    """Merge multiple query-like objects into a single MongoDB query.

    Shallow merge: keys set by later queries overwrite earlier ones.
    Raises TypeError for arguments that are neither mappings nor
    ``to_mongo_query``-capable objects.
    """
    merged: dict[str, Any] = {}
    for candidate in queries:
        if isinstance(candidate, Mapping):
            fragment = candidate
        elif hasattr(candidate, "to_mongo_query"):
            fragment = candidate.to_mongo_query()
        else:
            raise TypeError(
                f"Expected a Mapping or QueryProtocol, got {type(candidate)}"
            )
        merged.update(fragment)
    return merged
|
|
54
|
+
|
|
55
|
+
# A raw MongoDB sort spec: field name -> 1 (ascending) or -1 (descending).
type MongoSort = dict[str, Literal[1, -1]]
# NOTE(review): ensure_mongo_sort() also accepts a list of field names and
# objects with to_mongo_sort(); this alias does not reflect that — confirm.
type SortLike = MongoSort | str

class SortProtocol(Protocol):
    """Structural interface for objects convertible to a MongoDB sort."""

    # NOTE(review): declared async, but ensure_mongo_sort() calls
    # to_mongo_sort() synchronously — confirm which contract is intended.
    async def to_mongo_sort(self) -> MongoSort:
        """Convert the sort to a MongoDB sort."""
        pass
|
|
62
|
+
|
|
63
|
+
# Accepted sort-mode spellings mapped to MongoDB directions.
# None means "do not order by this field" (the entry is dropped).
_SORT_MODE_MAPPING = {
    '<': 1,
    '>': -1,
    '=': None,  # No ordering by this field
    1: 1,
    -1: -1,
    0: None,  # No ordering by this field
}
def ensure_mongo_sort(sort: SortLike) -> MongoSort:
    """Ensure the sort is a valid MongoDB sort.

    Accepts a single field name (ascending), a list of field names
    (all ascending), a mode mapping, or a ``to_mongo_sort``-capable object.
    """
    def mode_mapping(mode: Any) -> Literal[1, -1, None]:
        if mode not in _SORT_MODE_MAPPING:
            raise ValueError(f"Invalid sort mode: {mode}")
        return _SORT_MODE_MAPPING[mode]

    # Normalize the shorthand spellings into a field->mode mapping first.
    if isinstance(sort, str):
        sort = {sort: 1}  # single field, ascending
    elif isinstance(sort, list):
        sort = {field_name: 1 for field_name in sort}  # all ascending

    if not isinstance(sort, dict):
        if not hasattr(sort, "to_mongo_sort"):
            raise TypeError(f"Expected a dict or SortProtocol, got {type(sort)}")
        sort = sort.to_mongo_sort()

    assert isinstance(sort, dict), f"Expected a dict, got {type(sort)}"

    # Translate each mode, dropping fields whose mode maps to None.
    result: MongoSort = {}
    for field_name, raw_mode in sort.items():
        direction = mode_mapping(raw_mode)
        if direction is not None:
            result[field_name] = direction
    return result
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
# A raw MongoDB aggregation pipeline: an ordered list of stage documents.
type MongoPipeline = list[Mapping[str, Any]]
# Anything accepted where a pipeline is expected.
type PipelineLike = PipelineProtocol | MongoPipeline

class PipelineProtocol(Protocol):
    """Structural interface for objects convertible to a MongoDB pipeline."""

    # NOTE(review): declared async, but ensure_mongo_pipeline() calls
    # to_mongo_pipeline() synchronously — confirm which contract is intended.
    async def to_mongo_pipeline(self) -> MongoPipeline:
        """Convert the pipeline to a MongoDB pipeline."""
        pass
|
|
99
|
+
|
|
100
|
+
def ensure_mongo_pipeline(pipeline: PipelineLike) -> MongoPipeline:
    """Coerce *pipeline* into a plain list of MongoDB aggregation stages.

    Lists pass through unchanged; any other object must expose a
    ``to_mongo_pipeline()`` method whose result is returned instead.
    """
    if isinstance(pipeline, list):
        return pipeline

    converter = getattr(pipeline, "to_mongo_pipeline", None)
    if converter is None:
        raise TypeError(f"Expected a list or PipelineProtocol, got {type(pipeline)}")
    return converter()
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
# Module-wide registry of live collection instances. A WeakSet is used so
# that registration alone does not keep a collection object alive.
_collection_instances: WeakSet[CollectionProtocol] = WeakSet()

def register_collection_instance(collection: CollectionProtocol) -> None:
    """Register a collection instance."""
    _collection_instances.add(collection)

def get_collection_instances() -> list[CollectionProtocol]:
    """Return all registered collection instances.

    Snapshot list of the instances still alive at call time.
    """
    return list(_collection_instances)
|
|
118
|
+
|
|
119
|
+
class CollectionProtocol(Protocol):
    """Structural interface implemented by database collection objects."""

    # Name of the underlying collection.
    name: str
    # Document model class stored in this collection.
    document_cls: type[db.Document]

    async def _startup_task(self) -> None:
        """Manage the lifespan of the collection."""
        pass

    async def find(self, query: QueryLike) -> list[db.Document]:
        """Find documents matching the query."""
        pass

    async def insert_one(self, document: db.Document) -> db.Id:
        """Insert a single document and return its id."""
        pass

    async def update_one(self, query: QueryLike, update: Mapping[str, Any]) -> None:
        """Update a single document matching *query*."""
        pass

    async def delete_one(self, query: QueryLike) -> None:
        """Delete a single document matching *query*."""
        pass
|
stackraise/di.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
from typing import Protocol, Self, runtime_checkable, Awaitable
|
|
2
|
+
from fastapi import Depends
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
@runtime_checkable
class Injectable[T](Protocol):
    """Structural interface for dependency providers usable with Inject().

    # inject returns the dependency value, or an awaitable of it.
    """

    def inject(self) -> Awaitable[T] | T: ...
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def Inject[T](obj: Injectable[T]) -> Self:
|
|
11
|
+
assert issubclass(obj, Injectable), f"Cannot inject {obj}, it is not an Injectable"
|
|
12
|
+
return Depends(obj.inject)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class Singleton:
    """Base class that allows at most one instance per subclass.

    The instance is cached on the concrete class at first construction and
    retrieved via :meth:`inject` (a dependency hook compatible with Inject).
    """

    # Per-subclass cache; assigned on the concrete class in __new__.
    _instance = None

    def __new__(cls, *args, **kwargs):
        # Constructing a second instance is treated as a programming error.
        assert (
            cls._instance is None
        ), f"Singleton instance of {cls.__qualname__} already initialized"

        # NOTE(review): unreachable after the assert above, EXCEPT when
        # asserts are stripped (python -O) — then this silently returns the
        # existing instance instead of creating a duplicate. Confirm which
        # re-instantiation behavior is intended.
        if cls._instance is not None:
            return cls._instance

        self = super().__new__(cls)

        cls._instance = self
        return self

    @classmethod
    def inject(cls) -> Self:
        """Return the already-created singleton instance."""
        assert (
            cls._instance is not None
        ), f"Singleton instance of {cls.__qualname__} not initialized"
        return cls._instance
|
stackraise/event.py
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
from inspect import iscoroutinefunction
|
|
3
|
+
from typing import Awaitable, Callable, Generic, Optional, TypeAlias, TypeVar
|
|
4
|
+
from anyio import create_task_group
|
|
5
|
+
from stackraise.logging import get_logger
|
|
6
|
+
|
|
7
|
+
E = TypeVar("E")
|
|
8
|
+
|
|
9
|
+
EventHandler: TypeAlias = Callable[[E], Awaitable[None] | None]
|
|
10
|
+
AsyncEventHandler: TypeAlias = Callable[[E], Awaitable[None]]
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class EventEmitter(Generic[E]):
|
|
14
|
+
"""Representa una fuente de eventos
|
|
15
|
+
|
|
16
|
+
>>> from pydantic import BaseModel
|
|
17
|
+
>>> class UserSigninEvent(BaseModel)
|
|
18
|
+
>>> user_signin_event = EventEmitter()
|
|
19
|
+
|
|
20
|
+
>>> user_signin_event.emit(UserSigninEvent())
|
|
21
|
+
|
|
22
|
+
"""
|
|
23
|
+
|
|
24
|
+
_name: str
|
|
25
|
+
_subscriptions: set[EventSubscription]
|
|
26
|
+
|
|
27
|
+
def __init__(self, name: str):
|
|
28
|
+
self._name = name
|
|
29
|
+
self._log = get_logger(f"{name} event emitter")
|
|
30
|
+
self._subscriptions = set()
|
|
31
|
+
self._exception_count: int = 0
|
|
32
|
+
|
|
33
|
+
def __repr__(self):
|
|
34
|
+
return (
|
|
35
|
+
f"EventEmitter({self._name}, "
|
|
36
|
+
f"{len(self._subscriptions)} enabled subscriptions, "
|
|
37
|
+
f"{len(self._exception_count)} exceptions counted)"
|
|
38
|
+
)
|
|
39
|
+
|
|
40
|
+
@property
|
|
41
|
+
def name(self):
|
|
42
|
+
return self._name
|
|
43
|
+
|
|
44
|
+
@property
|
|
45
|
+
def log(self):
|
|
46
|
+
return self._log
|
|
47
|
+
|
|
48
|
+
@property
|
|
49
|
+
def subscriptions(self) -> list[EventSubscription]:
|
|
50
|
+
return list(self._subscriptions)
|
|
51
|
+
|
|
52
|
+
async def emit(self, event: E):
|
|
53
|
+
"""Emite un evento"""
|
|
54
|
+
try:
|
|
55
|
+
async with create_task_group() as tg:
|
|
56
|
+
for sub in self.subscriptions:
|
|
57
|
+
tg.start_soon(sub._event_task, event, name=self.name)
|
|
58
|
+
except* Exception as e:
|
|
59
|
+
self.log.exception("Occurred while broadcasting event")
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
__call__ = emit
|
|
63
|
+
|
|
64
|
+
@property
|
|
65
|
+
def exception_count(self):
|
|
66
|
+
return self._exception_count
|
|
67
|
+
|
|
68
|
+
def subscribe(
|
|
69
|
+
self, handler: EventHandler, name: Optional[str] = None
|
|
70
|
+
) -> EventSubscription:
|
|
71
|
+
subscription = EventSubscription(self, handler, name=name)
|
|
72
|
+
subscription.subscribe()
|
|
73
|
+
return subscription
|
|
74
|
+
|
|
75
|
+
def handler(
|
|
76
|
+
self, /, name: Optional[str] = None
|
|
77
|
+
) -> Callable[[EventHandler], EventHandler]:
|
|
78
|
+
def decorator(handler: EventHandler) -> EventHandler:
|
|
79
|
+
self.subscribe(handler, name=name)
|
|
80
|
+
return handler
|
|
81
|
+
|
|
82
|
+
return decorator
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
class EventSubscription(Generic[E]):
    """Binding between an :class:`EventEmitter` and one handler.

    A subscription is "enabled" while it sits in the emitter's subscription
    set; ``subscribe``/``unsubscribe`` toggle that membership.
    """

    _name: str
    _emitter: EventEmitter[E]
    # Handler normalized to async by ensure_async().
    _event_handler: AsyncEventHandler
    # FIX(annotation): Callable[[ExceptionGroup]] was an invalid typing form.
    _exception_handler: Callable[[ExceptionGroup, EventSubscription], None]

    def __init__(
        self,
        emitter: EventEmitter[E],
        handler: EventHandler,
        name: Optional[str] = None,
    ):
        # Defaults to the handler's function name when no name is given.
        self._name = name or handler.__name__
        self._emitter = emitter
        self._event_handler = ensure_async(handler)
        self._exception_handler = default_exception_handler

    def __repr__(self):
        return (
            f"EventSubscription({self._emitter._name}, "
            f"{self._name} "
            f'{"enabled" if self.is_enabled else "disabled"})'
        )

    @property
    def emitter(self):
        return self._emitter

    @property
    def name(self):
        return self._name

    @property
    def is_enabled(self) -> bool:
        # Enabled == currently registered with the emitter.
        return self in self._emitter._subscriptions

    def subscribe(self):
        """Enable this subscription on its emitter."""
        self._emitter._subscriptions.add(self)

    def unsubscribe(self):
        """Disable this subscription on its emitter."""
        self._emitter._subscriptions.remove(self)

    async def _event_task(self, event: E):
        """Run the handler for one event, routing errors to the exception handler."""
        try:
            await self._event_handler(event)
        except* Exception as exc:
            # Count on the emitter, then delegate; exc is an ExceptionGroup here.
            self._emitter._exception_count += 1
            try:
                self._exception_handler(exc, self)
            except* Exception as nested_exc:
                # Last resort: the exception handler itself failed.
                self.emitter.log.fatal("Exception handling exception")
                self.emitter.log.exception(nested_exc)
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def default_exception_handler(exc: Exception, subscription: EventSubscription):
    """Default exception handler: report *exc* on the owning emitter's log."""
    emitter_log = subscription.emitter.log
    emitter_log.exception(exc)
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def ensure_async(fn: Callable[[E], Awaitable[None] | None]):
    """Return *fn* as an async callable.

    Coroutine functions are returned unchanged; plain callables are wrapped
    in an async adapter. FIX: the wrapper now carries ``functools.wraps`` so
    metadata such as ``__name__`` survives — EventSubscription.__init__ uses
    ``handler.__name__`` as the default subscription name.
    """
    if iscoroutinefunction(fn):
        return fn

    from functools import wraps  # local: keeps the module import block untouched

    @wraps(fn)
    async def async_wrapper(*args, **kwargs):
        return fn(*args, **kwargs)

    return async_wrapper
|
stackraise/inflection.py
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
from functools import cache
|
|
2
|
+
|
|
3
|
+
from inflector import English
|
|
4
|
+
from slugify import slugify
|
|
5
|
+
|
|
6
|
+
_english_inflector = English()
|
|
7
|
+
|
|
8
|
+
@cache
def to_tablename(s: str) -> str:
    """Convert *s* to a database table name via the inflector's ``tableize``.

    Results are cached per input string.
    """
    return _english_inflector.tableize(s)
|
|
11
|
+
|
|
12
|
+
@cache
def to_camelcase(s: str) -> str:
    """Convert *s* to lowerCamelCase, preserving any trailing underscores.

    Trailing underscores are stripped before camelizing and re-appended
    afterwards. FIX: guard against empty / all-underscore input, where
    ``camel[0]`` previously raised IndexError.
    """
    ns = s.rstrip('_')

    if not ns:
        # Nothing to camelize: empty string or underscores only.
        return s

    camel = _english_inflector.camelize(ns)
    # Inflector produces UpperCamelCase; lower the first character.
    camel = camel[0].lower() + camel[1:]

    return camel + '_' * (len(s) - len(ns))
|
|
20
|
+
|
|
21
|
+
@cache
def to_slug(s: str) -> str:
    """Convert *s* to a URL-safe slug via python-slugify.

    Results are cached per input string.
    """
    return slugify(s)
|
|
24
|
+
|
|
25
|
+
@cache
def to_underscore(s: str) -> str:
    """Convert *s* to snake_case via the inflector's ``underscore``.

    Results are cached per input string.
    """
    return _english_inflector.underscore(s)
|
|
28
|
+
|