coredis 5.5.0__cp313-cp313-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- 22fe76227e35f92ab5c3__mypyc.cpython-313-darwin.so +0 -0
- coredis/__init__.py +42 -0
- coredis/_enum.py +42 -0
- coredis/_json.py +11 -0
- coredis/_packer.cpython-313-darwin.so +0 -0
- coredis/_packer.py +71 -0
- coredis/_protocols.py +50 -0
- coredis/_py_311_typing.py +20 -0
- coredis/_py_312_typing.py +17 -0
- coredis/_sidecar.py +114 -0
- coredis/_utils.cpython-313-darwin.so +0 -0
- coredis/_utils.py +440 -0
- coredis/_version.py +34 -0
- coredis/_version.pyi +1 -0
- coredis/cache.py +801 -0
- coredis/client/__init__.py +6 -0
- coredis/client/basic.py +1240 -0
- coredis/client/cluster.py +1265 -0
- coredis/commands/__init__.py +64 -0
- coredis/commands/_key_spec.py +517 -0
- coredis/commands/_utils.py +108 -0
- coredis/commands/_validators.py +159 -0
- coredis/commands/_wrappers.py +175 -0
- coredis/commands/bitfield.py +110 -0
- coredis/commands/constants.py +662 -0
- coredis/commands/core.py +8484 -0
- coredis/commands/function.py +408 -0
- coredis/commands/monitor.py +168 -0
- coredis/commands/pubsub.py +905 -0
- coredis/commands/request.py +108 -0
- coredis/commands/script.py +296 -0
- coredis/commands/sentinel.py +246 -0
- coredis/config.py +50 -0
- coredis/connection.py +906 -0
- coredis/constants.cpython-313-darwin.so +0 -0
- coredis/constants.py +37 -0
- coredis/credentials.py +45 -0
- coredis/exceptions.py +360 -0
- coredis/experimental/__init__.py +1 -0
- coredis/globals.py +23 -0
- coredis/modules/__init__.py +121 -0
- coredis/modules/autocomplete.py +138 -0
- coredis/modules/base.py +262 -0
- coredis/modules/filters.py +1319 -0
- coredis/modules/graph.py +362 -0
- coredis/modules/json.py +691 -0
- coredis/modules/response/__init__.py +0 -0
- coredis/modules/response/_callbacks/__init__.py +0 -0
- coredis/modules/response/_callbacks/autocomplete.py +42 -0
- coredis/modules/response/_callbacks/graph.py +237 -0
- coredis/modules/response/_callbacks/json.py +21 -0
- coredis/modules/response/_callbacks/search.py +221 -0
- coredis/modules/response/_callbacks/timeseries.py +158 -0
- coredis/modules/response/types.py +179 -0
- coredis/modules/search.py +1089 -0
- coredis/modules/timeseries.py +1139 -0
- coredis/parser.cpython-313-darwin.so +0 -0
- coredis/parser.py +344 -0
- coredis/pipeline.py +1225 -0
- coredis/pool/__init__.py +11 -0
- coredis/pool/basic.py +453 -0
- coredis/pool/cluster.py +517 -0
- coredis/pool/nodemanager.py +340 -0
- coredis/py.typed +0 -0
- coredis/recipes/__init__.py +0 -0
- coredis/recipes/credentials/__init__.py +5 -0
- coredis/recipes/credentials/iam_provider.py +63 -0
- coredis/recipes/locks/__init__.py +5 -0
- coredis/recipes/locks/extend.lua +17 -0
- coredis/recipes/locks/lua_lock.py +281 -0
- coredis/recipes/locks/release.lua +10 -0
- coredis/response/__init__.py +5 -0
- coredis/response/_callbacks/__init__.py +538 -0
- coredis/response/_callbacks/acl.py +32 -0
- coredis/response/_callbacks/cluster.py +183 -0
- coredis/response/_callbacks/command.py +86 -0
- coredis/response/_callbacks/connection.py +31 -0
- coredis/response/_callbacks/geo.py +58 -0
- coredis/response/_callbacks/hash.py +85 -0
- coredis/response/_callbacks/keys.py +59 -0
- coredis/response/_callbacks/module.py +33 -0
- coredis/response/_callbacks/script.py +85 -0
- coredis/response/_callbacks/sentinel.py +179 -0
- coredis/response/_callbacks/server.py +241 -0
- coredis/response/_callbacks/sets.py +44 -0
- coredis/response/_callbacks/sorted_set.py +204 -0
- coredis/response/_callbacks/streams.py +185 -0
- coredis/response/_callbacks/strings.py +70 -0
- coredis/response/_callbacks/vector_sets.py +159 -0
- coredis/response/_utils.py +33 -0
- coredis/response/types.py +416 -0
- coredis/retry.py +233 -0
- coredis/sentinel.py +477 -0
- coredis/stream.py +369 -0
- coredis/tokens.py +2286 -0
- coredis/typing.py +593 -0
- coredis-5.5.0.dist-info/METADATA +211 -0
- coredis-5.5.0.dist-info/RECORD +100 -0
- coredis-5.5.0.dist-info/WHEEL +6 -0
- coredis-5.5.0.dist-info/licenses/LICENSE +23 -0
coredis/stream.py
ADDED
|
@@ -0,0 +1,369 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
from deprecated.sphinx import versionadded
|
|
6
|
+
|
|
7
|
+
from coredis._utils import EncodingInsensitiveDict, nativestr
|
|
8
|
+
from coredis.client import Client
|
|
9
|
+
from coredis.exceptions import (
|
|
10
|
+
ResponseError,
|
|
11
|
+
StreamConsumerInitializationError,
|
|
12
|
+
StreamDuplicateConsumerGroupError,
|
|
13
|
+
)
|
|
14
|
+
from coredis.response.types import StreamEntry
|
|
15
|
+
from coredis.tokens import PureToken
|
|
16
|
+
from coredis.typing import (
|
|
17
|
+
AnyStr,
|
|
18
|
+
ClassVar,
|
|
19
|
+
Generator,
|
|
20
|
+
Generic,
|
|
21
|
+
KeyT,
|
|
22
|
+
MutableMapping,
|
|
23
|
+
Parameters,
|
|
24
|
+
StringT,
|
|
25
|
+
TypedDict,
|
|
26
|
+
ValueT,
|
|
27
|
+
)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class StreamParameters(TypedDict, total=False):
    """
    Optional per-stream configuration accepted by stream consumers
    via ``**stream_parameters``.

    ``total=False`` because the consumers look these up with
    ``stream_parameters.get(..., {})`` and treat every key as optional
    (a missing ``identifier`` means "start from the latest entry").
    """

    #: Starting ``identifier`` for the consumer. If not present it will start
    #: from the latest entry
    identifier: StringT
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class State(TypedDict, total=False):
    """
    Internal per-stream cursor state tracked by a stream consumer.
    All keys are optional (``total=False``).
    """

    #: Last delivered (or configured starting) stream entry identifier
    identifier: StringT | None
    #: When true the consumer is draining the pending-entries backlog;
    #: set by ``GroupConsumer.initialize`` when ``start_from_backlog`` is used
    pending: bool | None
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
class Consumer(Generic[AnyStr]):
    # Per-stream cursor state (identifier / pending flags), keyed by stream name.
    # NOTE(review): annotated with KeyT here but __init__ assigns a
    # MutableMapping[StringT, State] — confirm which is intended.
    state: MutableMapping[KeyT, State]
    #: Identifier used for a stream before any entry has been observed
    DEFAULT_START_ID: ClassVar[bytes] = b"0-0"

    def __init__(
        self,
        client: Client[AnyStr],
        streams: Parameters[KeyT],
        buffer_size: int = 0,
        timeout: int | None = 0,
        **stream_parameters: StreamParameters,
    ):
        """
        Standalone stream consumer that starts reading from the latest entry
        of each stream provided in :paramref:`streams`.

        The latest entry is determined by calling :meth:`coredis.Redis.xinfo_stream`
        and using the :data:`last-entry` attribute
        at the point of initializing the consumer instance or on first fetch (whichever comes
        first). If the stream(s) do not exist at the time of consumer creation, the
        consumer will simply start from the minimum identifier (``0-0``)

        :param client: The redis client to use
        :param streams: the stream identifiers to consume from
        :param buffer_size: Size of buffer (per stream) to maintain. This
         translates to the maximum number of stream entries that are fetched
         on each request to redis.
        :param timeout: Maximum amount of time in milliseconds to block for new
         entries to appear on the streams the consumer is reading from.
        :param stream_parameters: Mapping of optional parameters to use
         by stream for the streams provided in :paramref:`streams`.
        """
        self.client: Client[AnyStr] = client
        self.streams: set[KeyT] = set(streams)
        # Encoding-insensitive so str/bytes stream names resolve to the same state.
        self.state: MutableMapping[StringT, State] = EncodingInsensitiveDict(
            {stream: stream_parameters.get(nativestr(stream), {}) for stream in streams}
        )
        # Entries fetched from redis but not yet handed to the caller, per stream.
        self.buffer: MutableMapping[AnyStr, list[StreamEntry]] = EncodingInsensitiveDict({})
        self.buffer_size = buffer_size
        self.timeout = timeout
        # Lazy initialization flags: full init and per-stream init (for add_stream).
        self._initialized = False
        self._initialized_streams: dict[StringT, bool] = {}

    def chunk_streams(self) -> list[dict[ValueT, StringT]]:
        """
        Build the ``{stream: identifier}`` argument(s) for the next read.

        For cluster clients each stream is emitted as its own single-entry
        mapping (one request per stream); otherwise all streams go into a
        single mapping (one request total).
        """
        # Imported locally to avoid a circular import with coredis.client.
        import coredis.client

        if isinstance(self.client, coredis.client.RedisCluster):
            return [
                {stream: self.state[stream].get("identifier", None) or self.DEFAULT_START_ID}
                for stream in self.streams
            ]
        else:
            return [
                {
                    stream: self.state[stream].get("identifier", None) or self.DEFAULT_START_ID
                    for stream in self.streams
                }
            ]

    async def initialize(self, partial: bool = False) -> Consumer[AnyStr]:
        """
        Resolve the starting identifier for each stream from its ``last-entry``
        (via ``xinfo_stream``). Idempotent; with ``partial=True`` only streams
        not yet initialized (e.g. added via :meth:`add_stream`) are processed.
        """
        if self._initialized and not partial:
            return self

        for stream in self.streams:
            if partial and self._initialized_streams.get(stream):
                continue
            try:
                info = await self.client.xinfo_stream(stream)
                if info:
                    last_entry = info["last-entry"]
                    if last_entry:
                        # setdefault: an explicitly configured identifier wins.
                        self.state[stream].setdefault("identifier", last_entry.identifier)
            except ResponseError:
                # Stream doesn't exist yet: fall back to DEFAULT_START_ID at read time.
                pass
            self._initialized_streams[stream] = True
        self._initialized = True
        return self

    async def add_stream(self, stream: StringT, identifier: StringT | None = None) -> bool:
        """
        Adds a new stream identifier to this consumer

        :param stream: The stream identifier
        :param identifier: Optional identifier to start consuming from; if not
         provided the starting point is resolved as on initialization.
        :return: ``True`` if the stream was added successfully, ``False`` otherwise
        """
        self.streams.add(stream)
        self.state.setdefault(stream, {"identifier": identifier} if identifier else {})
        await self.initialize(partial=True)
        return stream in self._initialized_streams

    def __await__(self) -> Generator[Any, None, Consumer[AnyStr]]:
        # Allows ``consumer = await Consumer(...)`` to eagerly initialize.
        return self.initialize().__await__()

    def __aiter__(self) -> Consumer[AnyStr]:
        """
        Returns the instance of the consumer itself which can be iterated over
        """
        return self

    async def __anext__(self) -> tuple[AnyStr, StreamEntry]:
        """
        Returns the next available stream entry available from any of
        :paramref:`Consumer.streams`.

        :raises: :exc:`StopAsyncIteration` if no more entries are available
        """
        stream, entry = await self.get_entry()
        if not (stream and entry):
            raise StopAsyncIteration()
        return stream, entry

    async def get_entry(self) -> tuple[AnyStr | None, StreamEntry | None]:
        """
        Fetches the next available entry from the streams specified in
        :paramref:`Consumer.streams`. If there were any entries
        previously fetched and buffered, they will be returned before
        making a new request to the server.
        """
        await self.initialize()
        cur = None
        cur_stream = None
        # Serve from the local buffer first, if any stream has buffered entries.
        for stream, buffer_entries in list(self.buffer.items()):
            if buffer_entries:
                cur_stream, cur = stream, self.buffer[stream].pop(0)
                break
        else:
            # Buffer empty everywhere: fetch fresh entries from redis.
            consumed_entries: dict[AnyStr, tuple[StreamEntry, ...]] = {}
            for chunk in self.chunk_streams():
                consumed_entries.update(
                    await self.client.xread(
                        chunk,
                        # +1 so one entry can be returned and buffer_size buffered.
                        count=self.buffer_size + 1,
                        block=(self.timeout if (self.timeout and self.timeout > 0) else None),
                    )
                    or {}
                )
            for stream, entries in consumed_entries.items():
                if entries:
                    if not cur:
                        # First stream with results: return its head, buffer the rest.
                        cur = entries[0]
                        cur_stream = stream
                        if entries[1:]:
                            self.buffer.setdefault(stream, []).extend(entries[1:])
                    else:
                        # A result was already chosen: buffer everything from this stream.
                        self.buffer.setdefault(stream, []).extend(entries)
        if cur and cur_stream:
            # Advance the cursor so the next read starts after the returned entry.
            self.state[cur_stream]["identifier"] = cur.identifier
        return cur_stream, cur
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
class GroupConsumer(Consumer[AnyStr]):
    #: ``>`` means "entries never delivered to any consumer in the group"
    DEFAULT_START_ID: ClassVar[bytes] = b">"

    def __init__(
        self,
        client: Client[AnyStr],
        streams: Parameters[KeyT],
        group: StringT,
        consumer: StringT,
        buffer_size: int = 0,
        auto_create: bool = True,
        auto_acknowledge: bool = False,
        start_from_backlog: bool = False,
        timeout: int | None = None,
        **stream_parameters: StreamParameters,
    ):
        """
        A member of a stream consumer group. The consumer has an identical
        interface as :class:`coredis.stream.Consumer`.

        :param client: The redis client to use
        :param streams: The stream identifiers to consume from
        :param group: The name of the group this consumer is part of
        :param consumer: The unique name (within :paramref:`group`) of the consumer
        :param auto_create: If True the group will be created upon initialization
         or first fetch if it doesn't already exist.
        :param auto_acknowledge: If ``True`` the stream entries fetched will be fetched
         without needing to be acknowledged with :meth:`coredis.Redis.xack` to remove
         them from the pending entries list.
        :param start_from_backlog: If ``True`` the consumer will start by fetching any pending
         entries from the pending entry list before considering any new messages
         not seen by any other consumer in the :paramref:`group`
        :param buffer_size: Size of buffer (per stream) to maintain. This
         translates to the maximum number of stream entries that are fetched
         on each request to redis.
        :param timeout: Maximum amount of time to block for new
         entries to appear on the streams the consumer is reading from.
        :param stream_parameters: Mapping of optional parameters to use
         by stream for the streams provided in :paramref:`streams`.


        .. warning:: Providing an ``identifier`` in ``stream_parameters`` has a different
           meaning for a group consumer. If the value is any valid identifier other than ``>``
           the consumer will only access the history of pending messages. That is, the set of
           messages that were delivered to this consumer (identified by :paramref:`consumer`)
           and never acknowledged.
        """
        super().__init__(
            client,  # type: ignore[arg-type]
            streams,
            buffer_size,
            timeout,
            **stream_parameters,
        )
        self.group = group
        self.consumer = consumer
        self.auto_create = auto_create
        self.auto_acknowledge = auto_acknowledge
        self.start_from_backlog = start_from_backlog

    async def initialize(self, partial: bool = False) -> GroupConsumer[AnyStr]:
        """
        Verify (and with ``auto_create``, create) the consumer group on each
        stream and set up per-stream starting identifiers. Idempotent; with
        ``partial=True`` only streams not yet initialized are processed.

        :raises StreamConsumerInitializationError: if the group does not exist
         for one or more streams and :paramref:`GroupConsumer.auto_create`
         is ``False``.
        """
        if not self._initialized or partial:
            group_presence: dict[KeyT, bool] = {
                stream: stream in self._initialized_streams for stream in self.streams
            }
            for stream in self.streams:
                try:
                    if self._initialized_streams.get(stream):
                        continue
                    # The group exists for this stream iff exactly one entry of
                    # ``xinfo_groups`` carries the group's name.
                    group_presence[stream] = (
                        len(
                            [
                                info
                                for info in [
                                    EncodingInsensitiveDict(d)
                                    for d in await self.client.xinfo_groups(stream)
                                ]
                                if nativestr(info["name"]) == self.group
                            ]
                        )
                        == 1
                    )
                    if group_presence[stream] and self.start_from_backlog:
                        # Drain this consumer's pending entries first, starting
                        # from the minimum identifier.
                        self.state[stream]["pending"] = True
                        self.state[stream]["identifier"] = "0-0"
                except ResponseError:
                    # Stream doesn't exist yet; default to new-message delivery.
                    self.state[stream].setdefault("identifier", ">")

            if not (self.auto_create or all(group_presence.values())):
                # The streams genuinely missing the group are those whose
                # presence check came back False.
                missing_streams = {k for k in group_presence if not group_presence[k]}
                raise StreamConsumerInitializationError(
                    f"Consumer group: {self.group!r} does not exist for streams: {missing_streams}"
                )
            for stream in self.streams:
                if self.auto_create and not group_presence.get(stream):
                    try:
                        await self.client.xgroup_create(
                            stream, self.group, PureToken.NEW_ID, mkstream=True
                        )
                    except StreamDuplicateConsumerGroupError:  # noqa
                        # Another consumer created the group concurrently; fine.
                        pass
                self._initialized_streams[stream] = True
                self.state[stream].setdefault("identifier", ">")

            self._initialized = True
        return self

    @versionadded(version="4.12.0")
    async def add_stream(self, stream: StringT, identifier: StringT | None = ">") -> bool:
        """
        Adds a new stream identifier to this consumer

        :param stream: The stream identifier
        :param identifier: The identifier to start consuming from. For group
         consumers this should almost always be ``>`` (the default).

        :return: ``True`` if the stream was added successfully, ``False`` otherwise
        """
        return await super().add_stream(stream, identifier)

    def __await__(self) -> Generator[Any, None, GroupConsumer[AnyStr]]:
        # Allows ``consumer = await GroupConsumer(...)`` to eagerly initialize.
        return self.initialize().__await__()

    def __aiter__(self) -> GroupConsumer[AnyStr]:
        """
        Returns the instance of the consumer itself which can be iterated over
        """
        return self

    async def get_entry(self) -> tuple[AnyStr | None, StreamEntry | None]:
        """
        Fetches the next available entry from the streams specified in
        :paramref:`GroupConsumer.streams`. If there were any entries
        previously fetched and buffered, they will be returned before
        making a new request to the server.
        """
        await self.initialize()

        cur = None
        cur_stream = None
        # Serve from the local buffer first, if any stream has buffered entries.
        for stream, buffer_entries in list(self.buffer.items()):
            if buffer_entries:
                cur_stream, cur = stream, self.buffer[stream].pop(0)
                break
        else:
            # Buffer empty everywhere: fetch fresh entries for this group member.
            consumed_entries: dict[AnyStr, tuple[StreamEntry, ...]] = {}
            for chunk in self.chunk_streams():
                consumed_entries.update(
                    await self.client.xreadgroup(
                        self.group,
                        self.consumer,
                        # +1 so one entry can be returned and buffer_size buffered.
                        count=self.buffer_size + 1,
                        block=(self.timeout if (self.timeout and self.timeout > 0) else None),
                        noack=self.auto_acknowledge,
                        streams=chunk,
                    )
                    or {}
                )
            for stream, entries in consumed_entries.items():
                if entries:
                    if not cur:
                        # First stream with results: return its head, buffer the rest.
                        cur = entries[0]
                        cur_stream = stream
                        if entries[1:]:
                            self.buffer.setdefault(stream, []).extend(entries[1:])

                    else:
                        # A result was already chosen: buffer everything from this stream.
                        self.buffer.setdefault(stream, []).extend(entries)
                    if self.state[stream].get("pending"):
                        # Advance the backlog cursor past everything consumed.
                        self.state[stream]["identifier"] = entries[-1].identifier
                else:
                    if self.state[stream].get("pending"):
                        # Backlog exhausted: switch back to new-message delivery
                        # (DEFAULT_START_ID ``>``) and retry immediately.
                        self.state[stream].pop("identifier", None)
                        self.state[stream].pop("pending", None)
                        if not cur:
                            return await self.get_entry()
        return cur_stream, cur
|