stackraise 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- stackraise/__init__.py +6 -0
- stackraise/ai/__init__.py +2 -0
- stackraise/ai/rpa.py +380 -0
- stackraise/ai/toolset.py +227 -0
- stackraise/app.py +23 -0
- stackraise/auth/__init__.py +2 -0
- stackraise/auth/model.py +24 -0
- stackraise/auth/service.py +240 -0
- stackraise/ctrl/__init__.py +4 -0
- stackraise/ctrl/change_stream.py +40 -0
- stackraise/ctrl/crud_controller.py +63 -0
- stackraise/ctrl/file_storage.py +68 -0
- stackraise/db/__init__.py +11 -0
- stackraise/db/adapter.py +60 -0
- stackraise/db/collection.py +292 -0
- stackraise/db/cursor.py +229 -0
- stackraise/db/document.py +282 -0
- stackraise/db/exceptions.py +9 -0
- stackraise/db/id.py +79 -0
- stackraise/db/index.py +84 -0
- stackraise/db/persistence.py +238 -0
- stackraise/db/pipeline.py +245 -0
- stackraise/db/protocols.py +141 -0
- stackraise/di.py +36 -0
- stackraise/event.py +150 -0
- stackraise/inflection.py +28 -0
- stackraise/io/__init__.py +3 -0
- stackraise/io/imap_client.py +400 -0
- stackraise/io/smtp_client.py +102 -0
- stackraise/logging.py +22 -0
- stackraise/model/__init__.py +11 -0
- stackraise/model/core.py +16 -0
- stackraise/model/dto.py +12 -0
- stackraise/model/email_message.py +88 -0
- stackraise/model/file.py +154 -0
- stackraise/model/name_email.py +45 -0
- stackraise/model/query_filters.py +231 -0
- stackraise/model/time_range.py +285 -0
- stackraise/model/validation.py +8 -0
- stackraise/templating/__init__.py +4 -0
- stackraise/templating/exceptions.py +23 -0
- stackraise/templating/image/__init__.py +2 -0
- stackraise/templating/image/model.py +51 -0
- stackraise/templating/image/processor.py +154 -0
- stackraise/templating/parser.py +156 -0
- stackraise/templating/pptx/__init__.py +3 -0
- stackraise/templating/pptx/pptx_engine.py +204 -0
- stackraise/templating/pptx/slide_renderer.py +181 -0
- stackraise/templating/tracer.py +57 -0
- stackraise-0.1.0.dist-info/METADATA +37 -0
- stackraise-0.1.0.dist-info/RECORD +52 -0
- stackraise-0.1.0.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,292 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from functools import wraps
|
|
4
|
+
import inspect
|
|
5
|
+
from typing import Any, Callable, Mapping, Optional, Unpack
|
|
6
|
+
from weakref import WeakSet
|
|
7
|
+
|
|
8
|
+
import stackraise.db as db
|
|
9
|
+
|
|
10
|
+
from pymongo import results
|
|
11
|
+
|
|
12
|
+
from .adapter import Adapter
|
|
13
|
+
from .cursor import FindCursor
|
|
14
|
+
from .exceptions import NotFoundError
|
|
15
|
+
from .id import Id
|
|
16
|
+
from .persistence import current_context
|
|
17
|
+
from .protocols import (
|
|
18
|
+
DocumentProtocol,
|
|
19
|
+
QueryLike,
|
|
20
|
+
ensure_mongo_query,
|
|
21
|
+
register_collection_instance,
|
|
22
|
+
)
|
|
23
|
+
|
|
24
|
+
from .index import _update_indices
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class Collection[T: DocumentProtocol]:
|
|
28
|
+
|
|
29
|
+
def __init__(self, adapter: Adapter[T], name: Optional[str] = None):
|
|
30
|
+
self._adapter = adapter
|
|
31
|
+
self._name = name or adapter.tablename
|
|
32
|
+
register_collection_instance(self)
|
|
33
|
+
|
|
34
|
+
@property
|
|
35
|
+
def name(self) -> str:
|
|
36
|
+
"""Return the name of the collection."""
|
|
37
|
+
return self._name
|
|
38
|
+
|
|
39
|
+
@property
|
|
40
|
+
def adapter(self) -> db.Adapter[T]:
|
|
41
|
+
"""Return the adapter for this collection."""
|
|
42
|
+
return self._adapter
|
|
43
|
+
|
|
44
|
+
@property
|
|
45
|
+
def document_class(self) -> type[T]:
|
|
46
|
+
"""Return the document class for this collection."""
|
|
47
|
+
return self._adapter.document_class
|
|
48
|
+
|
|
49
|
+
async def _startup_task(self, persistence: db.Persistence):
|
|
50
|
+
"""Watch de mongo para actualizar los cambios en el repositorio."""
|
|
51
|
+
inner, session = self._inner_collection_and_session()
|
|
52
|
+
|
|
53
|
+
# undate collection indices
|
|
54
|
+
try:
|
|
55
|
+
print(f"Updating indices for collection {self._name}...")
|
|
56
|
+
await _update_indices(self._adapter.document_class, inner, session)
|
|
57
|
+
print(f"Updating indices for collection {self._name}... OK")
|
|
58
|
+
except Exception as e:
|
|
59
|
+
print(f"Error updating indices for collection {self._name}: {e}")
|
|
60
|
+
raise
|
|
61
|
+
|
|
62
|
+
def _inner_collection_and_session(self):
|
|
63
|
+
persistence, session = current_context()
|
|
64
|
+
inner = persistence.database.get_collection(self._name)
|
|
65
|
+
return inner, session # if session.in_transaction else None
|
|
66
|
+
|
|
67
|
+
# async def _create_index(self, *args, **kwargs):
|
|
68
|
+
# """Creates an index on the collection."""
|
|
69
|
+
# inner, session = self._inner_collection_and_session()
|
|
70
|
+
# await inner.create_index(*args, session=session, **kwargs)
|
|
71
|
+
|
|
72
|
+
# async def _drop_index(self, *args, **kwargs):
|
|
73
|
+
# """Creates an index on the collection."""
|
|
74
|
+
# collection, session = self._inner_collection_and_session()
|
|
75
|
+
# await collection.drop_index(*args, session=session, **kwargs)
|
|
76
|
+
|
|
77
|
+
# async def _drop_indexes(self, *args, **kwargs):
|
|
78
|
+
# """Creates an index on the collection."""
|
|
79
|
+
# collection, session = self._inner_collection_and_session()
|
|
80
|
+
# await collection.drop_indexes(*args, session=session, **kwargs)
|
|
81
|
+
|
|
82
|
+
async def _count_documents(self, *args, **kwargs) -> int:
|
|
83
|
+
collection, session = self._inner_collection_and_session()
|
|
84
|
+
return await collection.count_documents(*args, session=session, **kwargs)
|
|
85
|
+
|
|
86
|
+
async def _find_one(self, *args, **kwargs) -> None | T:
|
|
87
|
+
collection, session = self._inner_collection_and_session()
|
|
88
|
+
raw = await collection.find_one(*args, session=session, **kwargs)
|
|
89
|
+
if raw is None:
|
|
90
|
+
return None
|
|
91
|
+
return self._adapter.parse_item(raw)
|
|
92
|
+
|
|
93
|
+
async def _update_one(self, *args, **kwargs) -> results.UpdateResult:
|
|
94
|
+
collection, session = self._inner_collection_and_session()
|
|
95
|
+
result = await collection.update_one(*args, session=session, **kwargs)
|
|
96
|
+
assert result.acknowledged
|
|
97
|
+
return result
|
|
98
|
+
|
|
99
|
+
async def _update_many(self, *args, **kwargs) -> results.UpdateResult:
|
|
100
|
+
collection, session = self._inner_collection_and_session()
|
|
101
|
+
result = await collection.update_many(*args, session=session, **kwargs)
|
|
102
|
+
assert result.acknowledged
|
|
103
|
+
return result
|
|
104
|
+
|
|
105
|
+
async def _delete_many(self, *args, **kwargs) -> results.DeleteResult:
|
|
106
|
+
collection, session = self._inner_collection_and_session()
|
|
107
|
+
result = await collection.delete_many(*args, session=session, **kwargs)
|
|
108
|
+
assert result.acknowledged
|
|
109
|
+
return result
|
|
110
|
+
|
|
111
|
+
async def _find_one_and_update(self, *args, **kwargs):
|
|
112
|
+
collection, session = self._inner_collection_and_session()
|
|
113
|
+
raw = await collection.find_one_and_update(*args, session=session, **kwargs)
|
|
114
|
+
if raw is None:
|
|
115
|
+
return None
|
|
116
|
+
return self._adapter.parse_item(raw)
|
|
117
|
+
|
|
118
|
+
def _find(self, *args, **kwargs):
|
|
119
|
+
collection, session = self._inner_collection_and_session()
|
|
120
|
+
return collection.find(*args, session=session, **kwargs)
|
|
121
|
+
|
|
122
|
+
def _aggregate[R](
|
|
123
|
+
self, *args, result_adapter: Optional[db.Adapter] = None, **kwargs
|
|
124
|
+
):
|
|
125
|
+
collection, session = self._inner_collection_and_session()
|
|
126
|
+
return collection.aggregate(*args, session=session, **kwargs)
|
|
127
|
+
|
|
128
|
+
async def insert_item(self, item: T, with_id: Optional[Id] = None) -> T:
|
|
129
|
+
"""
|
|
130
|
+
Insert a new item into the collection.
|
|
131
|
+
Args:
|
|
132
|
+
item (T): The item to insert.
|
|
133
|
+
Returns:
|
|
134
|
+
T: The inserted item with its ID populated.
|
|
135
|
+
"""
|
|
136
|
+
collection, session = self._inner_collection_and_session()
|
|
137
|
+
await item.__prepare_for_storage__()
|
|
138
|
+
raw = self._adapter.dump_item(item)
|
|
139
|
+
if with_id is not None:
|
|
140
|
+
raw["_id"] = with_id
|
|
141
|
+
result = await collection.insert_one(raw, session=session)
|
|
142
|
+
assert result.acknowledged
|
|
143
|
+
item.id = Id(result.inserted_id)
|
|
144
|
+
|
|
145
|
+
return item
|
|
146
|
+
|
|
147
|
+
async def update_item(self, item: T):
|
|
148
|
+
"""
|
|
149
|
+
Update an existing item in the collection.
|
|
150
|
+
Args:
|
|
151
|
+
item (T): The item to update. Must have an ID.
|
|
152
|
+
Returns:
|
|
153
|
+
T: The updated item.
|
|
154
|
+
Raises:
|
|
155
|
+
ValueError: If the item does not have an ID.
|
|
156
|
+
NotFoundError: If the item with the given ID does not exist in the collection.
|
|
157
|
+
"""
|
|
158
|
+
assert item.id is not None, f"{type(item)} must have an id to be updated"
|
|
159
|
+
await item.__prepare_for_storage__()
|
|
160
|
+
if item.id is None:
|
|
161
|
+
raise ValueError(f"You are trying to update a non-inserted object")
|
|
162
|
+
raw = self._adapter.dump_item(item, with_id=False)
|
|
163
|
+
result = await self._update_one({"_id": item.id}, {"$set": raw})
|
|
164
|
+
if result.matched_count != 1:
|
|
165
|
+
raise NotFoundError(item.ref)
|
|
166
|
+
return item
|
|
167
|
+
|
|
168
|
+
async def fetch_by_id(self, id: Id, not_found_error=True) -> T | None:
|
|
169
|
+
"""
|
|
170
|
+
Read a document from the collection by its ID.
|
|
171
|
+
|
|
172
|
+
Args:
|
|
173
|
+
id (Id): The ID of the document to fetch.
|
|
174
|
+
not_found_error (bool, optional): Whether to raise a NotFoundError if the document is not found.
|
|
175
|
+
Defaults to True.
|
|
176
|
+
|
|
177
|
+
Returns:
|
|
178
|
+
P | None: The fetched document, or None if it is not found.
|
|
179
|
+
"""
|
|
180
|
+
inner, session = self._inner_collection_and_session()
|
|
181
|
+
raw = await inner.find_one({"_id": id}, session=session)
|
|
182
|
+
|
|
183
|
+
if raw is None:
|
|
184
|
+
if not_found_error:
|
|
185
|
+
raise NotFoundError(id)
|
|
186
|
+
return None
|
|
187
|
+
|
|
188
|
+
return self._adapter.parse_item(raw)
|
|
189
|
+
|
|
190
|
+
async def delete_by_id(self, id: Id, not_found_error=True):
|
|
191
|
+
"""
|
|
192
|
+
Delete a document from the collection by its ID.
|
|
193
|
+
|
|
194
|
+
Args:
|
|
195
|
+
id (Id): The ID of the document to delete.
|
|
196
|
+
not_found_error (bool, optional): Whether to raise a KeyError if the document is not found.
|
|
197
|
+
Defaults to True.
|
|
198
|
+
|
|
199
|
+
Raises:
|
|
200
|
+
KeyError: If the document is not found and `not_found_error` is True.
|
|
201
|
+
"""
|
|
202
|
+
inner, session = self._inner_collection_and_session()
|
|
203
|
+
|
|
204
|
+
result = await inner.delete_one({"_id": id}, session=session)
|
|
205
|
+
if not_found_error and result.deleted_count != 1:
|
|
206
|
+
raise KeyError(id)
|
|
207
|
+
|
|
208
|
+
await self.adapter.document_class.__handle_post_deletion__(id)
|
|
209
|
+
|
|
210
|
+
async def count(self, query: QueryLike = {}):
|
|
211
|
+
"""
|
|
212
|
+
Counts the number of documents in the collection that match the given filter.
|
|
213
|
+
|
|
214
|
+
Args:
|
|
215
|
+
filter (Mapping[str, Any], optional): The filter to apply when counting documents. Defaults to {}.
|
|
216
|
+
|
|
217
|
+
Returns:
|
|
218
|
+
int: The number of documents that match the filter.
|
|
219
|
+
"""
|
|
220
|
+
query = ensure_mongo_query(query)
|
|
221
|
+
inner, session = self._inner_collection_and_session()
|
|
222
|
+
return await inner.count_documents(query, session=session)
|
|
223
|
+
|
|
224
|
+
def find(self, query: QueryLike = {}) -> FindCursor[T]:
|
|
225
|
+
"""
|
|
226
|
+
Find documents in the collection based on the provided filter.
|
|
227
|
+
|
|
228
|
+
Args:
|
|
229
|
+
filter (Mapping[str, Any], optional): The filter to apply when searching for documents. Defaults to {}.
|
|
230
|
+
|
|
231
|
+
Returns:
|
|
232
|
+
Cursor[P]: A cursor object containing the matching documents.
|
|
233
|
+
"""
|
|
234
|
+
|
|
235
|
+
def inner_cursor() -> db.InnerCursor:
|
|
236
|
+
return self._find(ensure_mongo_query(query))
|
|
237
|
+
|
|
238
|
+
return FindCursor(self._adapter, inner_cursor)
|
|
239
|
+
|
|
240
|
+
def pipeline[R](self, result_type: Optional[type[R]] = None) -> db.Pipeline[R]:
|
|
241
|
+
"""
|
|
242
|
+
Perform an aggregation operation on the collection.
|
|
243
|
+
|
|
244
|
+
Usage:
|
|
245
|
+
```python
|
|
246
|
+
@User.collection.pipeline
|
|
247
|
+
def vip_users(pipe: db.Pipeline):
|
|
248
|
+
pipe.match({"vip": True})
|
|
249
|
+
|
|
250
|
+
async for user in vip_users():
|
|
251
|
+
print(user)
|
|
252
|
+
|
|
253
|
+
```
|
|
254
|
+
Args:
|
|
255
|
+
fn (Callable[[db.Pipeline.Builder, *ARGS], None]): A function that takes a Pipeline.Builder and additional arguments.
|
|
256
|
+
Returns:
|
|
257
|
+
Callable[[*ARGS], db.Pipeline[T]]: A function that returns a Pipeline object
|
|
258
|
+
|
|
259
|
+
"""
|
|
260
|
+
pl = db.Pipeline(self)
|
|
261
|
+
if result_type is not None:
|
|
262
|
+
pl = pl.result_type(result_type)
|
|
263
|
+
return pl
|
|
264
|
+
|
|
265
|
+
def decorator[*ARGS](
|
|
266
|
+
fn: Callable[[*ARGS], db.Pipeline[R]],
|
|
267
|
+
) -> Callable[[*ARGS], db.Pipeline[R]]:
|
|
268
|
+
"""
|
|
269
|
+
Decorator to create a Pipeline object with the provided function.
|
|
270
|
+
"""
|
|
271
|
+
if inspect.iscoroutinefunction(fn):
|
|
272
|
+
|
|
273
|
+
@wraps(fn)
|
|
274
|
+
async def wrapper(*args, **kwargs):
|
|
275
|
+
pipeline = db.Pipeline(self, result_type)
|
|
276
|
+
await fn(pipeline, *args, **kwargs)
|
|
277
|
+
return pipeline
|
|
278
|
+
|
|
279
|
+
else:
|
|
280
|
+
|
|
281
|
+
@wraps(fn)
|
|
282
|
+
def wrapper(*args, **kwargs) -> db.Pipeline[R]:
|
|
283
|
+
"""
|
|
284
|
+
Wrapper function to create a Pipeline object with the provided function.
|
|
285
|
+
"""
|
|
286
|
+
pipeline = db.Pipeline(self, result_type)
|
|
287
|
+
fn(pipeline, *args, **kwargs)
|
|
288
|
+
return pipeline
|
|
289
|
+
|
|
290
|
+
return wrapper
|
|
291
|
+
|
|
292
|
+
return decorator
|
stackraise/db/cursor.py
ADDED
|
@@ -0,0 +1,229 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from ast import Not
|
|
4
|
+
from contextlib import asynccontextmanager
|
|
5
|
+
from functools import reduce
|
|
6
|
+
from typing import AsyncIterable, Awaitable, Callable, Self
|
|
7
|
+
from pymongo.asynchronous.cursor import AsyncCursor as InnerCursor
|
|
8
|
+
from pymongo.asynchronous.command_cursor import AsyncCommandCursor as InnerCommandCursor
|
|
9
|
+
from stackraise.db.adapter import Adapter
|
|
10
|
+
from fastapi.responses import StreamingResponse
|
|
11
|
+
from .protocols import SortLike, ensure_mongo_sort
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class CursorIterator[T]:
    """Async iterator that yields parsed documents from a raw pymongo cursor.

    Each raw document pulled from the underlying cursor is run through the
    adapter before being handed to the caller. StopAsyncIteration propagates
    from the inner cursor when it is exhausted.
    """

    __slots__ = ("_inner", "_adapter")

    def __init__(
        self,
        adapter: Adapter[T],
        inner: InnerCursor | InnerCommandCursor,

    ):
        self._adapter = adapter
        self._inner = inner

    def __aiter__(self):
        # The iterator is its own async-iterable.
        return self

    async def __anext__(self) -> T:
        item = await self._inner.next()
        return self._adapter.parse_item(item)

    async def close(self):
        # Releases the underlying cursor's resources.
        await self._inner.close()
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class CursorMixin[T]:
|
|
40
|
+
async def aiter(self) -> CursorIterator[T]:
|
|
41
|
+
"""Returns an async iterator over the cursor."""
|
|
42
|
+
raise NotImplementedError(
|
|
43
|
+
"This method should be implemented in subclasses to return an async context manager for the cursor."
|
|
44
|
+
)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
@asynccontextmanager
|
|
49
|
+
async def cursor(self):
|
|
50
|
+
iter = await self.aiter()
|
|
51
|
+
try:
|
|
52
|
+
yield iter
|
|
53
|
+
finally:
|
|
54
|
+
await iter.close()
|
|
55
|
+
|
|
56
|
+
async def first(self) -> T | None:
|
|
57
|
+
async with self.cursor() as cursor:
|
|
58
|
+
async for x in cursor:
|
|
59
|
+
return x
|
|
60
|
+
|
|
61
|
+
async def last(self) -> T | None:
|
|
62
|
+
async with self.cursor() as cursor:
|
|
63
|
+
x = None
|
|
64
|
+
async for x in cursor:
|
|
65
|
+
continue
|
|
66
|
+
return x
|
|
67
|
+
|
|
68
|
+
async def single(self) -> T:
|
|
69
|
+
async with self.cursor() as cursor:
|
|
70
|
+
try:
|
|
71
|
+
item = await cursor.__anext__()
|
|
72
|
+
except StopAsyncIteration:
|
|
73
|
+
raise ValueError("Expected a single item, but got none.")
|
|
74
|
+
try:
|
|
75
|
+
await cursor.__anext__()
|
|
76
|
+
except StopAsyncIteration:
|
|
77
|
+
return item
|
|
78
|
+
raise ValueError("Expected a single item, but got multiple items.")
|
|
79
|
+
|
|
80
|
+
async def all(self):
|
|
81
|
+
"""Returns all items in the cursor as a list."""
|
|
82
|
+
async with self.cursor() as cursor:
|
|
83
|
+
return [x async for x in cursor]
|
|
84
|
+
|
|
85
|
+
as_list = all
|
|
86
|
+
|
|
87
|
+
async def as_bytes(self):
|
|
88
|
+
async with self.cursor() as cursor:
|
|
89
|
+
items = [x async for x in cursor]
|
|
90
|
+
return cursor._adapter.list.dump_json(items)
|
|
91
|
+
|
|
92
|
+
async def as_str(self):
|
|
93
|
+
return (await self.as_bytes()).decode("utf-8")
|
|
94
|
+
|
|
95
|
+
def as_stream(
|
|
96
|
+
self, status_code=200, headers: dict[str, str] | None = None, **kwargs
|
|
97
|
+
) -> StreamingResponse:
|
|
98
|
+
"""Returns a streaming response that yields JSON items."""
|
|
99
|
+
|
|
100
|
+
async def generate():
|
|
101
|
+
async with self.cursor() as cursor:
|
|
102
|
+
yield b"["
|
|
103
|
+
first = True
|
|
104
|
+
async for item in cursor:
|
|
105
|
+
if not first:
|
|
106
|
+
yield b","
|
|
107
|
+
first = False
|
|
108
|
+
yield cursor._adapter.item.dump_json(item, **kwargs)
|
|
109
|
+
yield b"]"
|
|
110
|
+
|
|
111
|
+
return StreamingResponse(
|
|
112
|
+
content=generate(),
|
|
113
|
+
status_code=status_code,
|
|
114
|
+
media_type="application/json",
|
|
115
|
+
headers=headers,
|
|
116
|
+
)
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
# results
|
|
120
|
+
class FindCursor[T](CursorMixin[T]):
|
|
121
|
+
|
|
122
|
+
__slots__ = ("_inner",)
|
|
123
|
+
|
|
124
|
+
def __init__(
|
|
125
|
+
self,
|
|
126
|
+
adapter: Adapter[T],
|
|
127
|
+
inner: Callable[[], InnerCursor],
|
|
128
|
+
):
|
|
129
|
+
self._adapter = adapter
|
|
130
|
+
self._inner = inner
|
|
131
|
+
|
|
132
|
+
async def aiter(self):
|
|
133
|
+
return CursorIterator(self._adapter, self._inner())
|
|
134
|
+
|
|
135
|
+
def skip(self, count: int) -> Self:
|
|
136
|
+
inner = self._inner
|
|
137
|
+
def inner_wrap() -> InnerCursor:
|
|
138
|
+
return inner().skip(count)
|
|
139
|
+
return self.__class__(self._adapter, inner_wrap)
|
|
140
|
+
|
|
141
|
+
def limit(self, count: int) -> Self:
|
|
142
|
+
inner = self._inner
|
|
143
|
+
def inner_wrap() -> InnerCursor:
|
|
144
|
+
return inner().limit(count)
|
|
145
|
+
return self.__class__(self._adapter, inner_wrap)
|
|
146
|
+
|
|
147
|
+
def sort(self, sort: SortLike) -> Self:
|
|
148
|
+
inner = self._inner
|
|
149
|
+
def inner_wrap() -> InnerCursor:
|
|
150
|
+
return inner().sort(ensure_mongo_sort(sort))
|
|
151
|
+
return self.__class__(self._adapter, inner_wrap)
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
# class AggregationCursor[T](CursorResult[T]):
|
|
155
|
+
|
|
156
|
+
# __slots__ = ("_inner", "_adapter", "_started")
|
|
157
|
+
|
|
158
|
+
# def __init__(
|
|
159
|
+
# self,
|
|
160
|
+
# adapter: Adapter[T],
|
|
161
|
+
# inner: Callable[[], Awaitable[InnerCommandCursor]],
|
|
162
|
+
# ):
|
|
163
|
+
# super().__init__(adapter)
|
|
164
|
+
# self._inner = inner
|
|
165
|
+
|
|
166
|
+
# async def aiter(self) -> CursorIterator[T]:
|
|
167
|
+
# return CursorIterator(self._adapter, await self._inner())
|
|
168
|
+
|
|
169
|
+
# def result_type[U](self, type: type[U]) -> AggregationCursor[U]:
|
|
170
|
+
# return self.__class__(self._inner, Adapter(type))
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
# class CursorBase[T]:
|
|
177
|
+
# __slots__ = ("_adapter",)
|
|
178
|
+
# _adapter: Adapter[T]
|
|
179
|
+
|
|
180
|
+
# def __init__(self, adapter: Adapter[T]):
|
|
181
|
+
# self._adapter = adapter
|
|
182
|
+
|
|
183
|
+
# def __aiter__(self):
|
|
184
|
+
# return self
|
|
185
|
+
|
|
186
|
+
# def __await__(self):
|
|
187
|
+
# return self.as_list().__await__()
|
|
188
|
+
|
|
189
|
+
# async def first(self) -> T | None:
|
|
190
|
+
# async for x in self:
|
|
191
|
+
# return x
|
|
192
|
+
|
|
193
|
+
# async def last(self) -> T | None: # Single
|
|
194
|
+
# x = None
|
|
195
|
+
# async for x in self:
|
|
196
|
+
# continue
|
|
197
|
+
# return x
|
|
198
|
+
|
|
199
|
+
# async def as_list(self) -> list[T]:
|
|
200
|
+
# return [x async for x in self]
|
|
201
|
+
|
|
202
|
+
# async def as_bytes(self):
|
|
203
|
+
# return self._adapter.list.dump_json(await self.as_list())
|
|
204
|
+
|
|
205
|
+
# async def as_str(self):
|
|
206
|
+
# return (await self.as_bytes()).decode("utf-8")
|
|
207
|
+
|
|
208
|
+
# def as_stream(
|
|
209
|
+
# self, status_code=200, headers: dict[str, str] | None = None, **kwargs
|
|
210
|
+
# ) -> StreamingResponse:
|
|
211
|
+
# """Returns a streaming response that yields JSON items."""
|
|
212
|
+
|
|
213
|
+
# async def generate():
|
|
214
|
+
# yield b"["
|
|
215
|
+
# first = True
|
|
216
|
+
# async for item in self:
|
|
217
|
+
# if not first:
|
|
218
|
+
# yield b","
|
|
219
|
+
# first = False
|
|
220
|
+
# yield self._adapter.item.dump_json(item, **kwargs)
|
|
221
|
+
# yield b"]"
|
|
222
|
+
|
|
223
|
+
# return StreamingResponse(
|
|
224
|
+
# content=generate(),
|
|
225
|
+
# status_code=status_code,
|
|
226
|
+
# media_type="application/json",
|
|
227
|
+
# headers=headers,
|
|
228
|
+
# )
|
|
229
|
+
|