rapyer 1.1.3__tar.gz → 1.1.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. {rapyer-1.1.3 → rapyer-1.1.4}/PKG-INFO +1 -1
  2. {rapyer-1.1.3 → rapyer-1.1.4}/pyproject.toml +1 -1
  3. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/__init__.py +9 -1
  4. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/base.py +115 -53
  5. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/config.py +2 -0
  6. rapyer-1.1.4/rapyer/errors/__init__.py +8 -0
  7. rapyer-1.1.4/rapyer/errors/base.py +26 -0
  8. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/fields/expression.py +16 -2
  9. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/fields/index.py +5 -1
  10. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/fields/key.py +5 -2
  11. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/init.py +5 -2
  12. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/types/base.py +18 -11
  13. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/types/byte.py +5 -2
  14. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/types/convert.py +1 -25
  15. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/types/datetime.py +4 -3
  16. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/types/dct.py +32 -11
  17. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/types/float.py +7 -2
  18. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/types/integer.py +7 -2
  19. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/types/lst.py +26 -15
  20. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/types/string.py +1 -1
  21. rapyer-1.1.4/rapyer/typing_support.py +7 -0
  22. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/utils/annotation.py +4 -7
  23. rapyer-1.1.4/rapyer/utils/redis.py +22 -0
  24. rapyer-1.1.3/rapyer/errors/base.py +0 -10
  25. rapyer-1.1.3/rapyer/typing_support.py +0 -13
  26. rapyer-1.1.3/rapyer/utils/__init__.py +0 -0
  27. rapyer-1.1.3/rapyer/utils/redis.py +0 -25
  28. {rapyer-1.1.3 → rapyer-1.1.4}/README.md +0 -0
  29. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/context.py +0 -0
  30. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/fields/__init__.py +0 -0
  31. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/links.py +0 -0
  32. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/types/__init__.py +0 -0
  33. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/types/init.py +0 -0
  34. {rapyer-1.1.3/rapyer/errors → rapyer-1.1.4/rapyer/utils}/__init__.py +0 -0
  35. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/utils/fields.py +0 -0
  36. {rapyer-1.1.3 → rapyer-1.1.4}/rapyer/utils/pythonic.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: rapyer
- Version: 1.1.3
+ Version: 1.1.4
  Summary: Pydantic models with Redis as the backend
  License: MIT
  Keywords: redis,redis-json,pydantic,pydantic-v2,orm,database,async,nosql,cache,key-value,data-modeling,python,backend,storage,serialization,validation
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 
  [project]
  name = "rapyer"
- version = "1.1.3"
+ version = "1.1.4"
  description = "Pydantic models with Redis as the backend"
  authors = [{name = "YedidyaHKfir", email = "yedidyakfir@gmail.com"}]
  readme = "README.md"
@@ -1,6 +1,13 @@
  """Redis Pydantic - Pydantic models with Redis as the backend."""
 
- from rapyer.base import AtomicRedisModel, aget, find_redis_models, ainsert
+ from rapyer.base import (
+ AtomicRedisModel,
+ aget,
+ find_redis_models,
+ ainsert,
+ get,
+ alock_from_key,
+ )
  from rapyer.init import init_rapyer, teardown_rapyer
 
  __all__ = [
@@ -11,4 +18,5 @@ __all__ = [
  "get",
  "find_redis_models",
  "ainsert",
+ "alock_from_key",
  ]
@@ -4,7 +4,8 @@ import contextlib
  import functools
  import pickle
  import uuid
- from typing import ClassVar, Any, AsyncGenerator
+ from contextlib import AbstractAsyncContextManager
+ from typing import ClassVar, Any, get_origin
 
  from pydantic import (
  BaseModel,
@@ -15,17 +16,20 @@ from pydantic import (
  field_validator,
  )
  from pydantic_core.core_schema import FieldSerializationInfo, ValidationInfo
+ from redis.commands.search.index_definition import IndexDefinition, IndexType
+ from redis.commands.search.query import Query
+ from typing_extensions import deprecated
+
  from rapyer.config import RedisConfig
  from rapyer.context import _context_var, _context_xx_pipe
- from rapyer.errors.base import KeyNotFound
- from rapyer.fields.expression import ExpressionField
+ from rapyer.errors.base import KeyNotFound, UnsupportedIndexedFieldError
+ from rapyer.fields.expression import ExpressionField, AtomicField, Expression
  from rapyer.fields.index import IndexAnnotation
  from rapyer.fields.key import KeyAnnotation
  from rapyer.links import REDIS_SUPPORTED_LINK
  from rapyer.types.base import RedisType, REDIS_DUMP_FLAG_NAME
  from rapyer.types.convert import RedisConverter
  from rapyer.typing_support import Self, Unpack
- from rapyer.typing_support import deprecated
  from rapyer.utils.annotation import (
  replace_to_redis_types_in_annotation,
  has_annotation,
@@ -33,9 +37,12 @@ from rapyer.utils.annotation import (
  DYNAMIC_CLASS_DOC,
  )
  from rapyer.utils.fields import get_all_pydantic_annotation, is_redis_field
- from rapyer.utils.redis import acquire_lock, update_keys_in_pipeline
- from redis.commands.search.index_definition import IndexDefinition, IndexType
- from redis.commands.search.query import Query
+ from rapyer.utils.pythonic import safe_issubclass
+ from rapyer.utils.redis import (
+ acquire_lock,
+ update_keys_in_pipeline,
+ refresh_ttl_if_needed,
+ )
 
 
  def make_pickle_field_serializer(field: str):
@@ -106,34 +113,33 @@ class AtomicRedisModel(BaseModel):
  return f"${field_path}" if field_path else "$"
 
  @classmethod
- def redis_schema(cls):
+ def redis_schema(cls, redis_name: str = ""):
  fields = []
 
  for field_name, field_info in cls.model_fields.items():
  real_type = field_info.annotation
- if not is_redis_field(field_name, real_type):
- continue
-
- if not field_with_flag(field_info, IndexAnnotation):
- continue
-
  # Check if real_type is a class before using issubclass
- if isinstance(real_type, type):
- if issubclass(real_type, AtomicRedisModel):
- sub_fields = real_type.redis_schema()
- for sub_field in sub_fields:
- sub_field.name = f"{field_name}.{sub_field.name}"
- fields.append(sub_field)
- elif issubclass(real_type, RedisType):
- field_schema = real_type.redis_schema(field_name)
- fields.append(field_schema)
- else:
- raise RuntimeError(
- f"Indexed field {field_name} must be redis-supported to be indexed, see {REDIS_SUPPORTED_LINK}"
+ if get_origin(real_type) is not None or not isinstance(real_type, type):
+ if field_with_flag(field_info, IndexAnnotation):
+ raise UnsupportedIndexedFieldError(
+ f"Field {field_name} is type {real_type}, and not supported for indexing"
  )
+ else:
+ continue
+
+ full_redis_name = f"{redis_name}.{field_name}" if redis_name else field_name
+ if issubclass(real_type, AtomicRedisModel):
+ real_type: type[AtomicRedisModel]
+ sub_fields = real_type.redis_schema(full_redis_name)
+ fields.extend(sub_fields)
+ elif not field_with_flag(field_info, IndexAnnotation):
+ continue
+ elif issubclass(real_type, RedisType):
+ field_schema = real_type.redis_schema(full_redis_name)
+ fields.append(field_schema)
  else:
- raise RuntimeError(
- f"Indexed field {field_name} must be a simple redis-supported type, see {REDIS_SUPPORTED_LINK}"
+ raise UnsupportedIndexedFieldError(
+ f"Indexed field {field_name} must be redis-supported to be indexed, see {REDIS_SUPPORTED_LINK}"
  )
 
  return fields
@@ -217,14 +223,27 @@ class AtomicRedisModel(BaseModel):
 
  # Update the redis model list for initialization
  # Skip dynamically created classes from type conversion
- if cls.__doc__ != DYNAMIC_CLASS_DOC:
+ if cls.__doc__ != DYNAMIC_CLASS_DOC and cls.Meta.init_with_rapyer:
  REDIS_MODELS.append(cls)
 
  @classmethod
- def init_class(cls):
+ def create_expressions(cls, base_path: str = "") -> dict[str, Expression]:
+ expressions = {}
  for field_name, field_info in cls.model_fields.items():
+ full_field_name = rf"{base_path}\.{field_name}" if base_path else field_name
  field_type = field_info.annotation
- setattr(cls, field_name, ExpressionField(field_name, field_type))
+ if safe_issubclass(field_type, AtomicRedisModel):
+ expressions[field_name] = AtomicField(
+ field_name, **field_type.create_expressions(full_field_name)
+ )
+ else:
+ expressions[field_name] = ExpressionField(full_field_name, field_type)
+ return expressions
+
+ @classmethod
+ def init_class(cls):
+ for field_name, field_expression in cls.create_expressions().items():
+ setattr(cls, field_name, field_expression)
 
  def is_inner_model(self) -> bool:
  return bool(self.field_name)
@@ -233,13 +252,14 @@ class AtomicRedisModel(BaseModel):
  f"save function is deprecated and will become sync function in rapyer 1.2.0, use asave() instead"
  )
  async def save(self):
- return await self.asave()
+ return await self.asave() # pragma: no cover
 
  async def asave(self) -> Self:
  model_dump = self.redis_dump()
  await self.Meta.redis.json().set(self.key, self.json_path, model_dump)
  if self.Meta.ttl is not None:
- await self.Meta.redis.expire(self.key, self.Meta.ttl)
+ nx = not self.Meta.refresh_ttl
+ await self.Meta.redis.expire(self.key, self.Meta.ttl, nx=nx)
  return self
 
  def redis_dump(self):
@@ -252,7 +272,7 @@ class AtomicRedisModel(BaseModel):
  "duplicate function is deprecated and will be removed in rapyer 1.2.0, use aduplicate instead"
  )
  async def duplicate(self) -> Self:
- return await self.aduplicate()
+ return await self.aduplicate() # pragma: no cover
 
  async def aduplicate(self) -> Self:
  if self.is_inner_model():
@@ -266,7 +286,7 @@ class AtomicRedisModel(BaseModel):
  "duplicate_many function is deprecated and will be removed in rapyer 1.2.0, use aduplicate_many instead"
  )
  async def duplicate_many(self, num: int) -> list[Self]:
- return await self.aduplicate_many(num)
+ return await self.aduplicate_many(num) # pragma: no cover
 
  async def aduplicate_many(self, num: int) -> list[Self]:
  if self.is_inner_model():
@@ -297,13 +317,16 @@ class AtomicRedisModel(BaseModel):
  async with self.Meta.redis.pipeline() as pipe:
  update_keys_in_pipeline(pipe, self.key, **json_path_kwargs)
  await pipe.execute()
+ await refresh_ttl_if_needed(
+ self.Meta.redis, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
 
  @classmethod
  @deprecated(
  "get() classmethod is deprecated and will be removed in rapyer 1.2.0, use aget instead"
  )
  async def get(cls, key: str) -> Self:
- return await cls.aget(key)
+ return await cls.aget(key) # pragma: no cover
 
  @classmethod
  async def aget(cls, key: str) -> Self:
@@ -316,13 +339,16 @@ class AtomicRedisModel(BaseModel):
 
  instance = cls.model_validate(model_dump, context={REDIS_DUMP_FLAG_NAME: True})
  instance.key = key
+ await refresh_ttl_if_needed(
+ cls.Meta.redis, key, cls.Meta.ttl, cls.Meta.refresh_ttl
+ )
  return instance
 
  @deprecated(
  "load function is deprecated and will be removed in rapyer 1.2.0, use aload() instead"
  )
  async def load(self):
- return await self.aload()
+ return await self.aload() # pragma: no cover
 
  async def aload(self) -> Self:
  model_dump = await self.Meta.redis.json().get(self.key, self.json_path)
@@ -332,6 +358,9 @@ class AtomicRedisModel(BaseModel):
  instance = self.__class__(**model_dump)
  instance._pk = self._pk
  instance._base_model_link = self._base_model_link
+ await refresh_ttl_if_needed(
+ self.Meta.redis, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
  return instance
 
  @classmethod
@@ -365,6 +394,12 @@ class AtomicRedisModel(BaseModel):
  # Fetch the actual documents
  models = await cls.Meta.redis.json().mget(keys=keys, path="$")
 
+ if cls.Meta.ttl is not None and cls.Meta.refresh_ttl:
+ async with cls.Meta.redis.pipeline() as pipe:
+ for key in keys:
+ pipe.expire(key, cls.Meta.ttl)
+ await pipe.execute()
+
  instances = []
  for model, key in zip(models, keys):
  model = cls.model_validate(model[0], context={REDIS_DUMP_FLAG_NAME: True})
@@ -381,6 +416,8 @@ class AtomicRedisModel(BaseModel):
  async with cls.Meta.redis.pipeline() as pipe:
  for model in models:
  pipe.json().set(model.key, model.json_path, model.redis_dump())
+ if cls.Meta.ttl is not None:
+ pipe.expire(model.key, cls.Meta.ttl)
  await pipe.execute()
 
  @classmethod
@@ -388,7 +425,7 @@ class AtomicRedisModel(BaseModel):
  "function delete is deprecated and will be removed in rapyer 1.2.0, use adelete instead"
  )
  async def delete_by_key(cls, key: str) -> bool:
- return await cls.adelete_by_key(key)
+ return await cls.adelete_by_key(key) # pragma: no cover
 
  @classmethod
  async def adelete_by_key(cls, key: str) -> bool:
@@ -399,7 +436,7 @@ class AtomicRedisModel(BaseModel):
  "function delete is deprecated and will be removed in rapyer 1.2.0, use adelete instead"
  )
  async def delete(self):
- return await self.adelete()
+ return await self.adelete() # pragma: no cover
 
  async def adelete(self):
  if self.is_inner_model():
@@ -419,15 +456,17 @@ class AtomicRedisModel(BaseModel):
  )
  async def lock_from_key(
  cls, key: str, action: str = "default", save_at_end: bool = False
- ) -> AsyncGenerator[Self, None]:
- async with cls.alock_from_key(key, action, save_at_end) as redis_model:
- yield redis_model
+ ) -> AbstractAsyncContextManager[Self]:
+ async with cls.alock_from_key( # pragma: no cover
+ key, action, save_at_end # pragma: no cover
+ ) as redis_model: # pragma: no cover
+ yield redis_model # pragma: no cover
 
  @classmethod
  @contextlib.asynccontextmanager
  async def alock_from_key(
  cls, key: str, action: str = "default", save_at_end: bool = False
- ) -> AsyncGenerator[Self, None]:
+ ) -> AbstractAsyncContextManager[Self]:
  async with acquire_lock(cls.Meta.redis, f"{key}/{action}"):
  redis_model = await cls.aget(key)
  yield redis_model
@@ -440,14 +479,16 @@ class AtomicRedisModel(BaseModel):
  )
  async def lock(
  self, action: str = "default", save_at_end: bool = False
- ) -> AsyncGenerator[Self, None]:
- async with self.alock_from_key(self.key, action, save_at_end) as redis_model:
- yield redis_model
+ ) -> AbstractAsyncContextManager[Self]:
+ async with self.alock_from_key( # pragma: no cover
+ self.key, action, save_at_end # pragma: no cover
+ ) as redis_model: # pragma: no cover
+ yield redis_model # pragma: no cover
 
  @contextlib.asynccontextmanager
  async def alock(
  self, action: str = "default", save_at_end: bool = False
- ) -> AsyncGenerator[Self, None]:
+ ) -> AbstractAsyncContextManager[Self]:
  async with self.alock_from_key(self.key, action, save_at_end) as redis_model:
  unset_fields = {
  k: redis_model.__dict__[k] for k in redis_model.model_fields_set
@@ -461,14 +502,16 @@ class AtomicRedisModel(BaseModel):
  )
  async def pipeline(
  self, ignore_if_deleted: bool = False
- ) -> AsyncGenerator[Self, None]:
- async with self.apipeline(ignore_if_deleted=ignore_if_deleted) as redis_model:
- yield redis_model
+ ) -> AbstractAsyncContextManager[Self]:
+ async with self.apipeline( # pragma: no cover
+ ignore_if_deleted=ignore_if_deleted # pragma: no cover
+ ) as redis_model: # pragma: no cover
+ yield redis_model # pragma: no cover
 
  @contextlib.asynccontextmanager
  async def apipeline(
  self, ignore_if_deleted: bool = False
- ) -> AsyncGenerator[Self, None]:
+ ) -> AbstractAsyncContextManager[Self]:
  async with self.Meta.redis.pipeline() as pipe:
  try:
  redis_model = await self.__class__.aget(self.key)
@@ -476,7 +519,7 @@ class AtomicRedisModel(BaseModel):
  k: redis_model.__dict__[k] for k in redis_model.model_fields_set
  }
  self.__dict__.update(unset_fields)
- except (TypeError, IndexError):
+ except (TypeError, KeyNotFound):
  if ignore_if_deleted:
  redis_model = self
  else:
@@ -485,6 +528,9 @@ class AtomicRedisModel(BaseModel):
  _context_xx_pipe.set(ignore_if_deleted)
  yield redis_model
  await pipe.execute()
+ await refresh_ttl_if_needed(
+ self.Meta.redis, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
  _context_var.set(None)
  _context_xx_pipe.set(False)
 
@@ -530,13 +576,15 @@ REDIS_MODELS: list[type[AtomicRedisModel]] = []
  "get function is deprecated and will be removed in rapyer 1.2.0, use aget instead"
  )
  async def get(redis_key: str) -> AtomicRedisModel:
- return await aget(redis_key)
+ return await aget(redis_key) # pragma: no cover
 
 
  async def aget(redis_key: str) -> AtomicRedisModel:
  redis_model_mapping = {klass.__name__: klass for klass in REDIS_MODELS}
  class_name = redis_key.split(":")[0]
  klass = redis_model_mapping.get(class_name)
+ if klass is None:
+ raise KeyNotFound(f"{redis_key} is missing in redis")
  return await klass.aget(redis_key)
 
 
@@ -550,3 +598,17 @@ async def ainsert(*models: Unpack[AtomicRedisModel]) -> list[AtomicRedisModel]:
  pipe.json().set(model.key, model.json_path, model.redis_dump())
  await pipe.execute()
  return models
+
+
+ @contextlib.asynccontextmanager
+ async def alock_from_key(
+ key: str, action: str = "default", save_at_end: bool = False
+ ) -> AbstractAsyncContextManager[AtomicRedisModel | None]:
+ async with acquire_lock(AtomicRedisModel.Meta.redis, f"{key}/{action}"):
+ try:
+ redis_model = await aget(key)
+ except KeyNotFound:
+ redis_model = None
+ yield redis_model
+ if save_at_end and redis_model is not None:
+ await redis_model.asave()
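
For reference, a minimal usage sketch of the new module-level alock_from_key added above. The model class name "User", the key, and the connection URL are illustrative assumptions; the helper yields None when the key cannot be resolved and only saves on exit when save_at_end is true and a model was found.

import asyncio

import rapyer


async def main():
    # Connection URL is an assumption; init_rapyer also accepts a Redis client.
    await rapyer.init_rapyer(redis="redis://localhost:6379")

    # "User:123" assumes a registered model class named User; aget resolves the
    # class from the key prefix, so an unknown key simply yields None here.
    async with rapyer.alock_from_key("User:123", action="update", save_at_end=True) as model:
        if model is not None:
            ...  # mutate the model while holding the "User:123/update" lock

    await rapyer.teardown_rapyer()


asyncio.run(main())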
@@ -21,3 +21,5 @@ class RedisConfig:
  )
  redis_type: dict[type, type] = dataclasses.field(default_factory=create_all_types)
  ttl: int | None = None
+ init_with_rapyer: bool = True
+ refresh_ttl: bool = True # Enable TTL refresh on read/write operations by default
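
A hedged sketch of the two new RedisConfig options. The field names and defaults come from this diff; with refresh_ttl=True, reads and writes re-arm the TTL, while with refresh_ttl=False, asave() passes nx=True to EXPIRE so an existing TTL is left untouched. How the config is attached to a model's Meta is not shown here, so the sketch only constructs the object.

from rapyer.config import RedisConfig

config = RedisConfig(
    ttl=3600,               # keys expire after one hour
    refresh_ttl=False,      # new in 1.1.4: do not re-arm the TTL on reads/writes
    init_with_rapyer=True,  # new in 1.1.4: register the model with init_rapyer/teardown_rapyer
)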
@@ -0,0 +1,8 @@
+ from rapyer.errors.base import (
+ BadFilterError,
+ FindError,
+ RapyerError,
+ UnsupportedIndexedFieldError,
+ )
+
+ __all__ = ["BadFilterError", "FindError", "RapyerError", "UnsupportedIndexedFieldError"]
@@ -0,0 +1,26 @@
+ class RapyerError(Exception):
+ """Base exception for all rapyer errors."""
+
+ pass
+
+
+ class KeyNotFound(RapyerError):
+ """Raised when a key is not found in Redis."""
+
+ pass
+
+
+ class FindError(RapyerError):
+ """Raised when a model cannot be found."""
+
+ pass
+
+
+ class BadFilterError(FindError):
+ """Raised when a filter is invalid."""
+
+ pass
+
+
+ class UnsupportedIndexedFieldError(FindError):
+ pass
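
A small illustrative check (not part of the package) of how the new hierarchy nests: BadFilterError and UnsupportedIndexedFieldError both derive from FindError, which derives from RapyerError, so a single FindError handler covers both of the more specific failures.

from rapyer.errors import (
    BadFilterError,
    FindError,
    RapyerError,
    UnsupportedIndexedFieldError,
)

assert issubclass(BadFilterError, FindError)
assert issubclass(UnsupportedIndexedFieldError, FindError)
assert issubclass(FindError, RapyerError)

try:
    raise BadFilterError("You must use an operator to filter an expression in redis")
except FindError as exc:
    # Catching FindError also catches BadFilterError and UnsupportedIndexedFieldError.
    print(type(exc).__name__, exc)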
@@ -1,12 +1,17 @@
  from typing import Any
 
  from pydantic import TypeAdapter
+
+ from rapyer.errors import BadFilterError
  from rapyer.types.base import REDIS_DUMP_FLAG_NAME
+ from rapyer.typing_support import Unpack
 
 
  class Expression:
  def create_filter(self) -> str:
- raise NotImplementedError("Subclasses must implement create_filter")
+ raise NotImplementedError( # pragma: no cover
+ "Subclasses must implement create_filter" # pragma: no cover
+ ) # pragma: no cover
 
  def __and__(self, other: "Expression") -> "AndExpression":
  return AndExpression(self, other)
@@ -18,13 +23,22 @@ class Expression:
  return NotExpression(self)
 
 
+ class AtomicField(Expression):
+ def __init__(self, field_name: str, **sub_fields: Unpack[Expression]):
+ self.field_name = field_name
+ for sub_field_name, sub_field in sub_fields.items():
+ setattr(self, sub_field_name, sub_field)
+
+
  class ExpressionField(Expression):
  def __init__(self, field_name: str, field_type: Any = None):
  self.field_name = field_name
  self._adapter = TypeAdapter(field_type)
 
  def create_filter(self) -> str:
- return f"@{self.field_name}:*"
+ raise BadFilterError(
+ "You must use an operator to filter an expression in redis"
+ )
 
  def serialize_value(self, value: Any) -> Any:
  return self._adapter.dump_python(
@@ -1,6 +1,6 @@
  import dataclasses
  from datetime import datetime
- from typing import Annotated, Any, Generic, TypeVar
+ from typing import TYPE_CHECKING, Annotated, Any, Generic, TypeAlias, TypeVar
 
  from rapyer.types.datetime import RedisDatetimeTimestamp
 
@@ -33,3 +33,7 @@ class _IndexType(Generic[T]):
 
 
  Index = _IndexType
+
+
+ if TYPE_CHECKING:
+ Index: TypeAlias = Annotated[T, IndexAnnotation()] # pragma: no cover
@@ -1,5 +1,5 @@
  import dataclasses
- from typing import Annotated, Any, Generic, TypeVar
+ from typing import TYPE_CHECKING, Annotated, Any, Generic, TypeAlias, TypeVar
 
 
  @dataclasses.dataclass(frozen=True)
@@ -20,5 +20,8 @@ class _KeyType(Generic[T]):
  return Annotated[item, KeyAnnotation()]
 
 
- # Create the Key callable that works both as a function and generic type
  Key = _KeyType
+
+
+ if TYPE_CHECKING:
+ Key: TypeAlias = Annotated[T, KeyAnnotation()] # pragma: no cover
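
A short illustrative check of what the TYPE_CHECKING alias changes: at runtime Key[str] still goes through _KeyType.__class_getitem__ and yields Annotated[str, KeyAnnotation()], while static type checkers now see Key as the Annotated alias directly instead of the _KeyType helper class.

from typing import get_args

from rapyer.fields.key import Key, KeyAnnotation

annotated = Key[str]  # runtime path: _KeyType.__class_getitem__
base_type, marker = get_args(annotated)
assert base_type is str
assert isinstance(marker, KeyAnnotation)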
@@ -1,8 +1,9 @@
  import redis.asyncio as redis_async
- from rapyer.base import REDIS_MODELS
  from redis import ResponseError
  from redis.asyncio.client import Redis
 
+ from rapyer.base import REDIS_MODELS
+
 
  async def init_rapyer(
  redis: str | Redis = None, ttl: int = None, override_old_idx: bool = True
@@ -36,6 +37,8 @@ async def init_rapyer(
 
 
  async def teardown_rapyer():
+ closed_clients = set()
  for model in REDIS_MODELS:
- if model.Meta.ttl is not None:
+ if id(model.Meta.redis) not in closed_clients:
+ closed_clients.add(id(model.Meta.redis))
  await model.Meta.redis.aclose()
@@ -7,10 +7,12 @@ from typing import get_args, Any, TypeVar, Generic
  from pydantic import GetCoreSchemaHandler, TypeAdapter
  from pydantic_core import core_schema
  from pydantic_core.core_schema import ValidationInfo, CoreSchema, SerializationInfo
+ from redis.commands.search.field import TextField
+ from typing_extensions import deprecated
+
  from rapyer.context import _context_var
  from rapyer.typing_support import Self
- from rapyer.typing_support import deprecated
- from redis.commands.search.field import TextField
+ from rapyer.utils.redis import refresh_ttl_if_needed
 
  REDIS_DUMP_FLAG_NAME = "__rapyer_dumped__"
 
@@ -68,7 +70,7 @@ class RedisType(ABC):
  f"save function is deprecated and will become sync function in rapyer 1.2.0, use asave() instead"
  )
  async def save(self):
- return await self.asave()
+ return await self.asave() # pragma: no cover
 
  async def asave(self) -> Self:
  model_dump = self._adapter.dump_python(
@@ -76,26 +78,31 @@ class RedisType(ABC):
  )
  await self.client.json().set(self.key, self.json_path, model_dump)
  if self.Meta.ttl is not None:
- await self.client.expire(self.key, self.Meta.ttl)
+ nx = not self.Meta.refresh_ttl
+ await self.client.expire(self.key, self.Meta.ttl, nx=nx)
  return self
 
  @deprecated(
  "load function is deprecated and will be removed in rapyer 1.2.0, use aload() instead"
  )
  async def load(self):
- return await self.aload()
+ return await self.aload() # pragma: no cover
 
  async def aload(self):
  redis_value = await self.client.json().get(self.key, self.field_path)
  if redis_value is None:
  return None
- return self._adapter.validate_python(
+ result = self._adapter.validate_python(
  redis_value, context={REDIS_DUMP_FLAG_NAME: True}
  )
+ await refresh_ttl_if_needed(
+ self.client, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
+ return result
 
  @abc.abstractmethod
  def clone(self):
- pass
+ pass # pragma: no cover
 
  @classmethod
  def redis_schema(cls, field_name: str):
@@ -134,22 +141,22 @@ class GenericRedisType(RedisType, Generic[T], ABC):
 
  @abc.abstractmethod
  def iterate_items(self):
- pass
+ pass # pragma: no cover
 
  @classmethod
  @abc.abstractmethod
  def full_serializer(cls, value, info: SerializationInfo):
- pass
+ pass # pragma: no cover
 
  @classmethod
  @abc.abstractmethod
  def full_deserializer(cls, value, info: ValidationInfo):
- pass
+ pass # pragma: no cover
 
  @classmethod
  @abc.abstractmethod
  def schema_for_unknown(cls):
- pass
+ pass # pragma: no cover
 
  @classmethod
  def __get_pydantic_core_schema__(
@@ -2,6 +2,7 @@ from typing import TypeAlias, TYPE_CHECKING
 
  from pydantic_core import core_schema
  from pydantic_core.core_schema import ValidationInfo, SerializationInfo
+
  from rapyer.types.base import RedisType, REDIS_DUMP_FLAG_NAME
 
 
@@ -14,7 +15,9 @@ class RedisBytes(bytes, RedisType):
  def __iadd__(self, other):
  new_value = self + other
  if self.pipeline:
- self.pipeline.json().set(self.key, self.json_path, new_value)
+ self.pipeline.json().set(
+ self.key, self.json_path, self.serialize_unknown(new_value)
+ )
  return self.__class__(new_value)
 
  @classmethod
@@ -52,4 +55,4 @@ class RedisBytes(bytes, RedisType):
 
 
  if TYPE_CHECKING:
- RedisBytes: TypeAlias = RedisBytes | bytes
+ RedisBytes: TypeAlias = RedisBytes | bytes # pragma: no cover
@@ -1,6 +1,7 @@
  from typing import Any, get_origin
 
  from pydantic import BaseModel, PrivateAttr, TypeAdapter
+
  from rapyer.types.base import RedisType
  from rapyer.utils.annotation import TypeConverter, DYNAMIC_CLASS_DOC
  from rapyer.utils.pythonic import safe_issubclass
@@ -27,9 +28,6 @@ class RedisConverter(TypeConverter):
  return type_to_check in self.supported_types
 
  def convert_flat_type(self, type_to_convert: type) -> type:
- if type_to_convert is Any:
- return Any
-
  from rapyer.base import AtomicRedisModel
 
  if safe_issubclass(type_to_convert, AtomicRedisModel):
@@ -74,28 +72,6 @@ class RedisConverter(TypeConverter):
  def covert_generic_type(
  self, type_to_covert: type, generic_values: tuple[type]
  ) -> type:
- from rapyer.base import AtomicRedisModel
-
- if safe_issubclass(type_to_covert, AtomicRedisModel):
- return type(
- type_to_covert.__name__,
- (type_to_covert,),
- dict(
- _field_name=PrivateAttr(default=self.field_name),
- __doc__=DYNAMIC_CLASS_DOC,
- ),
- )
- if safe_issubclass(type_to_covert, BaseModel):
- type_to_covert: type[BaseModel]
- return type(
- f"Redis{type_to_covert.__name__}",
- (AtomicRedisModel, type_to_covert),
- dict(
- _field_name=PrivateAttr(default=self.field_name),
- __doc__=DYNAMIC_CLASS_DOC,
- ),
- )
-
  if safe_issubclass(type_to_covert, RedisType):
  redis_type = type_to_covert
  original_type = type_to_covert.original_type
@@ -3,9 +3,10 @@ from typing import TYPE_CHECKING
 
  from pydantic_core import core_schema
  from pydantic_core.core_schema import ValidationInfo, SerializationInfo
- from rapyer.types.base import RedisType, REDIS_DUMP_FLAG_NAME
  from redis.commands.search.field import NumericField
 
+ from rapyer.types.base import RedisType, REDIS_DUMP_FLAG_NAME
+
 
  class RedisDatetime(datetime, RedisType):
  original_type = datetime
@@ -71,5 +72,5 @@ class RedisDatetimeTimestamp(RedisDatetime):
 
 
  if TYPE_CHECKING:
- RedisDatetime = RedisDatetime | datetime
- RedisDatetimeTimestamp = RedisDatetimeTimestamp | datetime
+ RedisDatetime = RedisDatetime | datetime # pragma: no cover
+ RedisDatetimeTimestamp = RedisDatetimeTimestamp | datetime # pragma: no cover
@@ -1,7 +1,9 @@
  from typing import TypeVar, Generic, get_args, Any, TypeAlias, TYPE_CHECKING
 
  from pydantic_core import core_schema
+
  from rapyer.types.base import GenericRedisType, RedisType, REDIS_DUMP_FLAG_NAME
+ from rapyer.utils.redis import refresh_ttl_if_needed
  from rapyer.utils.redis import update_keys_in_pipeline
 
  T = TypeVar("T")
@@ -136,13 +138,21 @@ class RedisDict(dict[str, T], GenericRedisType, Generic[T]):
  serialized_value = self._adapter.dump_python(
  {key: value}, mode="json", context={REDIS_DUMP_FLAG_NAME: True}
  )
- return await self.client.json().set(
+ result = await self.client.json().set(
  self.key, self.json_field_path(key), serialized_value[key]
  )
+ await refresh_ttl_if_needed(
+ self.client, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
+ return result
 
  async def adel_item(self, key):
  super().__delitem__(key)
- return await self.client.json().delete(self.key, self.json_field_path(key))
+ result = await self.client.json().delete(self.key, self.json_field_path(key))
+ await refresh_ttl_if_needed(
+ self.client, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
+ return result
 
  async def aupdate(self, **kwargs):
  self.update(**kwargs)
@@ -157,12 +167,18 @@ class RedisDict(dict[str, T], GenericRedisType, Generic[T]):
  async with self.redis.pipeline() as pipeline:
  update_keys_in_pipeline(pipeline, self.key, **redis_params)
  await pipeline.execute()
+ await refresh_ttl_if_needed(
+ self.redis, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
 
  async def apop(self, key, default=None):
  # Execute the script atomically
  result = await self.client.eval(POP_SCRIPT, 1, self.key, self.json_path, key)
  # Key exists in Redis, pop from local dict (it should exist there too)
  super().pop(key, None)
+ await refresh_ttl_if_needed(
+ self.client, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
 
  if result is None:
  # Key doesn't exist in Redis
@@ -175,6 +191,9 @@ class RedisDict(dict[str, T], GenericRedisType, Generic[T]):
  async def apopitem(self):
  # Execute the script atomically
  result = await self.client.eval(POPITEM_SCRIPT, 1, self.key, self.json_path)
+ await refresh_ttl_if_needed(
+ self.client, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
 
  if result is not None:
  redis_key, redis_value = result
@@ -192,7 +211,11 @@ class RedisDict(dict[str, T], GenericRedisType, Generic[T]):
  async def aclear(self):
  self.clear()
  # Clear Redis dict
- return await self.client.json().set(self.key, self.json_path, {})
+ result = await self.client.json().set(self.key, self.json_path, {})
+ await refresh_ttl_if_needed(
+ self.client, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
+ return result
 
  def clone(self):
  return {
@@ -213,15 +236,13 @@ class RedisDict(dict[str, T], GenericRedisType, Generic[T]):
  }
 
  @classmethod
- def full_deserializer(cls, value, info: core_schema.ValidationInfo):
+ def full_deserializer(cls, value: dict, info: core_schema.ValidationInfo):
  ctx = info.context or {}
  should_serialize_redis = ctx.get(REDIS_DUMP_FLAG_NAME)
- if isinstance(value, dict):
- return {
- key: cls.deserialize_unknown(item) if should_serialize_redis else item
- for key, item in value.items()
- }
- return value
+ return {
+ key: cls.deserialize_unknown(item) if should_serialize_redis else item
+ for key, item in value.items()
+ }
 
  @classmethod
  def schema_for_unknown(cls):
@@ -229,4 +250,4 @@ class RedisDict(dict[str, T], GenericRedisType, Generic[T]):
 
 
  if TYPE_CHECKING:
- RedisDict: TypeAlias = RedisDict[T] | dict[str, T]
+ RedisDict: TypeAlias = RedisDict[T] | dict[str, T] # pragma: no cover
@@ -1,8 +1,10 @@
  from typing import TypeAlias, TYPE_CHECKING
 
- from rapyer.types.base import RedisType
  from redis.commands.search.field import NumericField
 
+ from rapyer.types.base import RedisType
+ from rapyer.utils.redis import refresh_ttl_if_needed
+
 
  class RedisFloat(float, RedisType):
  original_type = float
@@ -13,6 +15,9 @@ class RedisFloat(float, RedisType):
 
  async def aincrease(self, amount: float = 1.0):
  result = await self.client.json().numincrby(self.key, self.json_path, amount)
+ await refresh_ttl_if_needed(
+ self.client, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
  return result[0] if isinstance(result, list) and result else result
 
  def clone(self):
@@ -44,4 +49,4 @@ class RedisFloat(float, RedisType):
 
 
  if TYPE_CHECKING:
- RedisFloat: TypeAlias = RedisFloat | float
+ RedisFloat: TypeAlias = RedisFloat | float # pragma: no cover
@@ -1,9 +1,11 @@
  from typing import TypeAlias, TYPE_CHECKING
 
- from rapyer.types.base import RedisType
  from redis.commands.search.field import NumericField
  from typing_extensions import deprecated
 
+ from rapyer.types.base import RedisType
+ from rapyer.utils.redis import refresh_ttl_if_needed
+
 
  class RedisInt(int, RedisType):
  original_type = int
@@ -20,6 +22,9 @@ class RedisInt(int, RedisType):
 
  async def aincrease(self, amount: int = 1):
  result = await self.client.json().numincrby(self.key, self.json_path, amount)
+ await refresh_ttl_if_needed(
+ self.client, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
  return result[0] if isinstance(result, list) and result else result
 
  def clone(self):
@@ -63,4 +68,4 @@ class RedisInt(int, RedisType):
 
 
  if TYPE_CHECKING:
- RedisInt: TypeAlias = RedisInt | int
+ RedisInt: TypeAlias = RedisInt | int # pragma: no cover
@@ -3,9 +3,11 @@ from typing import TypeVar, TYPE_CHECKING
 
  from pydantic_core import core_schema
  from pydantic_core.core_schema import ValidationInfo, SerializationInfo
- from rapyer.types.base import GenericRedisType, RedisType, REDIS_DUMP_FLAG_NAME
  from typing_extensions import TypeAlias
 
+ from rapyer.types.base import GenericRedisType, RedisType, REDIS_DUMP_FLAG_NAME
+ from rapyer.utils.redis import refresh_ttl_if_needed
+
  T = TypeVar("T")
 
 
@@ -26,6 +28,9 @@ class RedisList(list, GenericRedisType[T]):
  new_val = self.create_new_values([key], [value])[0]
  return new_val
 
+ def sub_field_path(self, key: str):
+ return f"{self.field_path}[{key}]"
+
  def __setitem__(self, key, value):
  if self.pipeline:
  self.pipeline.json().set(self.key, self.json_field_path(key), value)
@@ -34,8 +39,6 @@ class RedisList(list, GenericRedisType[T]):
 
  def __iadd__(self, other):
  self.extend(other)
- if self.pipeline and other:
- self.pipeline.json().arrappend(self.key, self.json_path, *other)
  return self
 
  def append(self, __object):
@@ -74,6 +77,9 @@ class RedisList(list, GenericRedisType[T]):
  await self.redis.json().arrappend(
  self.key, self.json_path, *serialized_object
  )
+ await refresh_ttl_if_needed(
+ self.redis, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
 
  async def aextend(self, __iterable):
  items = list(__iterable)
@@ -90,15 +96,17 @@ class RedisList(list, GenericRedisType[T]):
  self.json_path,
  *serialized_items,
  )
+ await refresh_ttl_if_needed(
+ self.redis, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
 
  async def apop(self, index=-1):
  if self:
  self.pop(index)
  arrpop = await self.redis.json().arrpop(self.key, self.json_path, index)
-
- # Handle empty list case
- if arrpop is None or (isinstance(arrpop, list) and len(arrpop) == 0):
- return None
+ await refresh_ttl_if_needed(
+ self.redis, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
 
  # Handle case where arrpop returns [None] for an empty list
  if arrpop[0] is None:
@@ -119,6 +127,9 @@ class RedisList(list, GenericRedisType[T]):
  await self.redis.json().arrinsert(
  self.key, self.json_path, index, *serialized_object
  )
+ await refresh_ttl_if_needed(
+ self.redis, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
 
  async def aclear(self):
  # Clear local list
@@ -127,6 +138,9 @@ class RedisList(list, GenericRedisType[T]):
  # Clear Redis list
  if not self.pipeline:
  await self.client.json().set(self.key, self.json_path, [])
+ await refresh_ttl_if_needed(
+ self.client, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
 
  def clone(self):
  return [v.clone() if isinstance(v, RedisType) else v for v in self]
@@ -144,16 +158,13 @@ class RedisList(list, GenericRedisType[T]):
  ]
 
  @classmethod
- def full_deserializer(cls, value, info: ValidationInfo):
+ def full_deserializer(cls, value: list, info: ValidationInfo):
  ctx = info.context or {}
  is_redis_data = ctx.get(REDIS_DUMP_FLAG_NAME)
 
- if isinstance(value, list):
- return [
- cls.deserialize_unknown(item) if is_redis_data else item
- for item in value
- ]
- return value
+ return [
+ cls.deserialize_unknown(item) if is_redis_data else item for item in value
+ ]
 
  @classmethod
  def schema_for_unknown(cls):
@@ -161,4 +172,4 @@ class RedisList(list, GenericRedisType[T]):
 
 
  if TYPE_CHECKING:
- RedisList: TypeAlias = RedisList[T] | list[T]
+ RedisList: TypeAlias = RedisList[T] | list[T] # pragma: no cover
@@ -17,4 +17,4 @@ class RedisStr(str, RedisType):
 
 
  if TYPE_CHECKING:
- RedisStr: TypeAlias = RedisStr | str
+ RedisStr: TypeAlias = RedisStr | str # pragma: no cover
@@ -0,0 +1,7 @@
+ # For python 3.10 support
+ try:
+ from typing import Self, Unpack
+ except ImportError: # pragma: no cover
+ from typing_extensions import Self, Unpack # pragma: no cover
+
+ __all__ = ["Self", "Unpack"]
@@ -10,17 +10,17 @@ DYNAMIC_CLASS_DOC = "___dynamic_class___"
  class TypeConverter(ABC):
  @abc.abstractmethod
  def is_type_support(self, type_to_check: type) -> bool:
- pass
+ pass # pragma: no cover
 
  @abc.abstractmethod
  def convert_flat_type(self, type_to_convert: type) -> type:
- pass
+ pass # pragma: no cover
 
  @abc.abstractmethod
  def covert_generic_type(
  self, type_to_covert: type, generic_values: tuple[type]
  ) -> type:
- pass
+ pass # pragma: no cover
 
 
  def replace_to_redis_types_in_annotation(
@@ -74,13 +74,10 @@ def replace_to_redis_types_in_annotation(
  # If we don't support the origin, just use the original annotation
  origin = annotation
  return origin
- return annotation
+ return annotation # pragma: no cover - There is no way to reach this line
 
 
  def has_annotation(field: Any, annotation_type: Any) -> bool:
- if field is annotation_type:
- return True
-
  origin = get_origin(field)
  if origin is Annotated:
  args = get_args(field)
@@ -0,0 +1,22 @@
+ from contextlib import AbstractAsyncContextManager
+
+ from redis.asyncio import Redis
+
+
+ def acquire_lock(
+ redis: Redis, key: str, sleep_time: int = 0.1
+ ) -> AbstractAsyncContextManager[None]:
+ lock_key = f"{key}:lock"
+ return redis.lock(lock_key, sleep=sleep_time)
+
+
+ def update_keys_in_pipeline(pipeline, redis_key: str, **kwargs):
+ for json_path, value in kwargs.items():
+ pipeline.json().set(redis_key, json_path, value)
+
+
+ async def refresh_ttl_if_needed(
+ redis_client: Redis, key: str, ttl: int | None, refresh_ttl: bool = True
+ ) -> None:
+ if ttl is not None and refresh_ttl:
+ await redis_client.expire(key, ttl)
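
A hedged usage sketch of the new refresh_ttl_if_needed helper (connection URL and key are illustrative). It is a no-op when ttl is None or refresh_ttl is False; otherwise it re-arms the key's TTL with EXPIRE, which is how the read and write paths above keep touched keys alive.

import asyncio

from redis.asyncio import Redis

from rapyer.utils.redis import refresh_ttl_if_needed


async def main():
    client = Redis.from_url("redis://localhost:6379")
    await client.set("demo:key", "value")

    await refresh_ttl_if_needed(client, "demo:key", ttl=60)                     # EXPIRE demo:key 60
    await refresh_ttl_if_needed(client, "demo:key", ttl=None)                   # no-op: no TTL configured
    await refresh_ttl_if_needed(client, "demo:key", ttl=60, refresh_ttl=False)  # no-op: refresh disabled

    await client.aclose()


asyncio.run(main())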
@@ -1,10 +0,0 @@
- class RapyerError(Exception):
- """Base exception for all rapyer errors."""
-
- pass
-
-
- class KeyNotFound(RapyerError):
- """Raised when a key is not found in Redis."""
-
- pass
@@ -1,13 +0,0 @@
- # For python 3.10 support
- try:
- from typing import Self, Unpack
- except ImportError:
- from typing_extensions import Self, Unpack
-
- # For python 3.13 support
- try:
- from typing import deprecated
- except ImportError:
- from typing_extensions import deprecated
-
- __all__ = ["Self", "Unpack", "deprecated"]
File without changes
@@ -1,25 +0,0 @@
- import asyncio
- import contextlib
- import uuid
- from datetime import timedelta
-
- from redis.asyncio import Redis
-
-
- @contextlib.asynccontextmanager
- async def acquire_lock(
- redis: Redis, key: str, lock_timeout: timedelta | int = 10, sleep_time: int = 0.1
- ):
- lock_key = f"{key}:lock"
- lock_token = str(uuid.uuid4())
- while not await redis.set(lock_key, lock_token, nx=True, ex=lock_timeout):
- await asyncio.sleep(sleep_time)
- try:
- yield
- finally:
- await redis.delete(lock_key)
-
-
- def update_keys_in_pipeline(pipeline, redis_key: str, **kwargs):
- for json_path, value in kwargs.items():
- pipeline.json().set(redis_key, json_path, value)
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes