taskiq-redis 1.0.8__tar.gz → 1.0.9__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: taskiq-redis
-Version: 1.0.8
+Version: 1.0.9
 Summary: Redis integration for taskiq
 Keywords: taskiq,tasks,distributed,async,redis,result_backend
 Author: taskiq-team
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "taskiq-redis"
-version = "1.0.8"
+version = "1.0.9"
 description = "Redis integration for taskiq"
 authors = ["taskiq-team <taskiq@norely.com>"]
 readme = "README.md"
@@ -166,6 +166,7 @@ class RedisStreamBroker(BaseRedisBroker):
         mkstream: bool = True,
         xread_block: int = 2000,
         maxlen: Optional[int] = None,
+        approximate: bool = True,
         idle_timeout: int = 600000,  # 10 minutes
         unacknowledged_batch_size: int = 100,
         xread_count: Optional[int] = 100,
@@ -190,6 +191,8 @@ class RedisStreamBroker(BaseRedisBroker):
             Better to set it to a bigger value, to avoid unnecessary calls.
         :param maxlen: sets the maximum length of the stream
             trims (the old values of) the stream each time a new element is added
+        :param approximate: decides whether to trim the stream immediately (False) or
+            later on (True)
         :param xread_count: number of messages to fetch from the stream at once.
         :param additional_streams: additional streams to read from.
             Each key is a stream name, value is a consumer id.
@@ -210,6 +213,7 @@ class RedisStreamBroker(BaseRedisBroker):
         self.mkstream = mkstream
         self.block = xread_block
         self.maxlen = maxlen
+        self.approximate = approximate
         self.additional_streams = additional_streams or {}
         self.idle_timeout = idle_timeout
         self.unacknowledged_batch_size = unacknowledged_batch_size
@@ -252,6 +256,7 @@ class RedisStreamBroker(BaseRedisBroker):
             self.queue_name,
             {b"data": message.message},
             maxlen=self.maxlen,
+            approximate=self.approximate,
         )

     def _ack_generator(self, id: str) -> Callable[[], Awaitable[None]]:
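Taken together, these hunks let RedisStreamBroker cap its task stream: `maxlen` already existed in 1.0.8, and 1.0.9 adds `approximate`, which is forwarded to redis-py's `xadd` and controls whether the stream is trimmed exactly (`XADD ... MAXLEN = n`) or lazily (`XADD ... MAXLEN ~ n`). A minimal usage sketch, assuming the broker keeps its existing `url` and `queue_name` keywords; only `maxlen` and `approximate` appear in this diff:

```python
from taskiq_redis import RedisStreamBroker

broker = RedisStreamBroker(
    url="redis://localhost:6379",
    queue_name="taskiq",
    maxlen=10_000,       # keep roughly the latest 10 000 stream entries
    approximate=True,    # lazy trimming: XADD ... MAXLEN ~ 10000
)


@broker.task
async def add_one(value: int) -> int:
    return value + 1
```

With `approximate=True` Redis trims in whole macro-node increments when it is cheap to do so, so the stream may briefly exceed `maxlen`; `approximate=False` enforces the bound on every append at the cost of extra work per `XADD`.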
@@ -92,6 +92,8 @@ class RedisStreamClusterBroker(BaseRedisClusterBroker):
         consumer_id: str = "$",
         mkstream: bool = True,
         xread_block: int = 10000,
+        maxlen: Optional[int] = None,
+        approximate: bool = True,
         additional_streams: Optional[Dict[str, str]] = None,
         **connection_kwargs: Any,
     ) -> None:
@@ -111,6 +113,10 @@ class RedisStreamClusterBroker(BaseRedisClusterBroker):
         :param mkstream: create stream if it does not exist.
         :param xread_block: block time in ms for xreadgroup.
             Better to set it to a bigger value, to avoid unnecessary calls.
+        :param maxlen: sets the maximum length of the stream
+            trims (the old values of) the stream each time a new element is added
+        :param approximate: decides whether to trim the stream immediately (False) or
+            later on (True)
         :param additional_streams: additional streams to read from.
             Each key is a stream name, value is a consumer id.
         """
@@ -125,6 +131,8 @@ class RedisStreamClusterBroker(BaseRedisClusterBroker):
         self.consumer_id = consumer_id
         self.mkstream = mkstream
         self.block = xread_block
+        self.maxlen = maxlen
+        self.approximate = approximate
         self.additional_streams = additional_streams or {}

     async def _declare_consumer_group(self) -> None:
@@ -154,7 +162,12 @@ class RedisStreamClusterBroker(BaseRedisClusterBroker):

         :param message: message to append.
         """
-        await self.redis.xadd(self.queue_name, {b"data": message.message})
+        await self.redis.xadd(
+            self.queue_name,
+            {b"data": message.message},
+            maxlen=self.maxlen,
+            approximate=self.approximate,
+        )

     def _ack_generator(self, id: str) -> Callable[[], Awaitable[None]]:
         async def _ack() -> None:
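The cluster broker's `kick` previously issued a bare `XADD`; it now forwards `maxlen` and `approximate` as well. The snippet below mirrors that call with redis-py's asyncio client directly, to show the command that ends up on the wire; the stream name and values are illustrative only:

```python
import asyncio

from redis.asyncio import Redis


async def main() -> None:
    redis = Redis.from_url("redis://localhost:6379")
    # approximate=True  -> XADD taskiq MAXLEN ~ 1000 * data ...
    # approximate=False -> XADD taskiq MAXLEN = 1000 * data ...  (exact trim)
    await redis.xadd(
        "taskiq",
        {b"data": b"serialized task message"},
        maxlen=1000,
        approximate=True,
    )
    print(await redis.xlen("taskiq"))  # current stream length after trimming
    await redis.aclose()  # redis-py >= 5.0; use close() on older versions


asyncio.run(main())
```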
@@ -157,6 +157,8 @@ class RedisStreamSentinelBroker(BaseSentinelBroker):
         consumer_id: str = "$",
         mkstream: bool = True,
         xread_block: int = 10000,
+        maxlen: Optional[int] = None,
+        approximate: bool = True,
         additional_streams: Optional[Dict[str, str]] = None,
         **connection_kwargs: Any,
     ) -> None:
@@ -176,6 +178,10 @@ class RedisStreamSentinelBroker(BaseSentinelBroker):
         :param mkstream: create stream if it does not exist.
         :param xread_block: block time in ms for xreadgroup.
             Better to set it to a bigger value, to avoid unnecessary calls.
+        :param maxlen: sets the maximum length of the stream
+            trims (the old values of) the stream each time a new element is added
+        :param approximate: decides whether to trim the stream immediately (False) or
+            later on (True)
         :param additional_streams: additional streams to read from.
             Each key is a stream name, value is a consumer id.
         """
@@ -193,6 +199,8 @@ class RedisStreamSentinelBroker(BaseSentinelBroker):
         self.consumer_id = consumer_id
         self.mkstream = mkstream
         self.block = xread_block
+        self.maxlen = maxlen
+        self.approximate = approximate
         self.additional_streams = additional_streams or {}

     async def _declare_consumer_group(self) -> None:
@@ -223,7 +231,12 @@ class RedisStreamSentinelBroker(BaseSentinelBroker):
         :param message: message to append.
         """
         async with self._acquire_master_conn() as redis_conn:
-            await redis_conn.xadd(self.queue_name, {b"data": message.message})
+            await redis_conn.xadd(
+                self.queue_name,
+                {b"data": message.message},
+                maxlen=self.maxlen,
+                approximate=self.approximate,
+            )

     def _ack_generator(self, id: str) -> Callable[[], Awaitable[None]]:
         async def _ack() -> None:
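The Sentinel broker receives the same treatment inside its master-connection context. A hedged construction sketch: `sentinels`, `master_name`, and `queue_name` are assumptions about the base class signature (they do not appear in this diff, and the class is assumed to be exported from the package root like the other brokers), while `maxlen` and `approximate` are the keywords added here:

```python
from taskiq_redis import RedisStreamSentinelBroker

broker = RedisStreamSentinelBroker(
    sentinels=[("localhost", 26379)],  # assumed (host, port) pairs for Sentinel
    master_name="mymaster",            # assumed keyword for the monitored master
    queue_name="taskiq",
    maxlen=50_000,
    approximate=False,  # request an exact MAXLEN = trim on every XADD
)
```

The default `maxlen=None` keeps the 1.0.8 behaviour of an unbounded stream, so existing deployments are unaffected unless they opt in.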