rapyer 1.1.3.tar.gz → 1.1.5.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. {rapyer-1.1.3 → rapyer-1.1.5}/PKG-INFO +6 -1
  2. {rapyer-1.1.3 → rapyer-1.1.5}/pyproject.toml +9 -6
  3. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/__init__.py +9 -1
  4. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/base.py +247 -71
  5. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/config.py +7 -0
  6. rapyer-1.1.5/rapyer/errors/__init__.py +17 -0
  7. rapyer-1.1.5/rapyer/errors/base.py +38 -0
  8. rapyer-1.1.5/rapyer/fields/__init__.py +5 -0
  9. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/fields/expression.py +16 -2
  10. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/fields/index.py +5 -1
  11. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/fields/key.py +5 -2
  12. rapyer-1.1.5/rapyer/fields/safe_load.py +27 -0
  13. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/init.py +15 -3
  14. rapyer-1.1.5/rapyer/scripts.py +86 -0
  15. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/types/base.py +38 -11
  16. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/types/byte.py +5 -2
  17. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/types/convert.py +11 -27
  18. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/types/datetime.py +4 -3
  19. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/types/dct.py +44 -12
  20. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/types/float.py +7 -2
  21. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/types/integer.py +7 -2
  22. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/types/lst.py +59 -15
  23. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/types/string.py +1 -1
  24. rapyer-1.1.5/rapyer/typing_support.py +7 -0
  25. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/utils/annotation.py +4 -7
  26. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/utils/fields.py +25 -2
  27. rapyer-1.1.5/rapyer/utils/redis.py +22 -0
  28. rapyer-1.1.3/rapyer/errors/base.py +0 -10
  29. rapyer-1.1.3/rapyer/fields/__init__.py +0 -4
  30. rapyer-1.1.3/rapyer/typing_support.py +0 -13
  31. rapyer-1.1.3/rapyer/utils/__init__.py +0 -0
  32. rapyer-1.1.3/rapyer/utils/redis.py +0 -25
  33. {rapyer-1.1.3 → rapyer-1.1.5}/README.md +0 -0
  34. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/context.py +0 -0
  35. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/links.py +0 -0
  36. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/types/__init__.py +0 -0
  37. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/types/init.py +0 -0
  38. {rapyer-1.1.3/rapyer/errors → rapyer-1.1.5/rapyer/utils}/__init__.py +0 -0
  39. {rapyer-1.1.3 → rapyer-1.1.5}/rapyer/utils/pythonic.py +0 -0
{rapyer-1.1.3 → rapyer-1.1.5}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: rapyer
- Version: 1.1.3
+ Version: 1.1.5
  Summary: Pydantic models with Redis as the backend
  License: MIT
  Keywords: redis,redis-json,pydantic,pydantic-v2,orm,database,async,nosql,cache,key-value,data-modeling,python,backend,storage,serialization,validation
@@ -23,7 +23,12 @@ Classifier: Topic :: Database :: Database Engines/Servers
  Classifier: Topic :: Software Development :: Libraries :: Python Modules
  Classifier: Typing :: Typed
  Classifier: Operating System :: OS Independent
+ Provides-Extra: test
+ Requires-Dist: fakeredis[json,lua] (>=2.20.0) ; extra == "test"
  Requires-Dist: pydantic (>=2.11.0,<2.13.0)
+ Requires-Dist: pytest (>=8.4.2) ; extra == "test"
+ Requires-Dist: pytest-asyncio (>=0.25.0) ; extra == "test"
+ Requires-Dist: pytest-cov (>=6.0.0) ; extra == "test"
  Requires-Dist: redis[async] (>=6.0.0,<7.1.0)
  Project-URL: Bug Tracker, https://github.com/imaginary-cherry/rapyer/issues
  Project-URL: Changelog, https://github.com/imaginary-cherry/rapyer/releases
{rapyer-1.1.3 → rapyer-1.1.5}/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

  [project]
  name = "rapyer"
- version = "1.1.3"
+ version = "1.1.5"
  description = "Pydantic models with Redis as the backend"
  authors = [{name = "YedidyaHKfir", email = "yedidyakfir@gmail.com"}]
  readme = "README.md"
@@ -51,6 +51,14 @@ dependencies = [
  "pydantic>=2.11.0, <2.13.0",
  ]

+ [project.optional-dependencies]
+ test = [
+ "pytest>=8.4.2",
+ "pytest-asyncio>=0.25.0",
+ "pytest-cov>=6.0.0",
+ "fakeredis[lua,json]>=2.20.0",
+ ]
+
  [project.urls]
  Homepage = "https://imaginary-cherry.github.io/rapyer/"
  Documentation = "https://imaginary-cherry.github.io/rapyer/"
@@ -67,11 +75,6 @@ packages = [{include = "rapyer"}]
  black = "^25.9.0"
  mypy = "^1.0.0"

- [tool.poetry.group.tests.dependencies]
- pytest = "^8.4.2"
- pytest-asyncio = "^0.25.0"
- pytest-cov = "^6.0.0"
-
  [tool.coverage.run]
  source = ["rapyer"]
  omit = ["*/tests/*", "*/test_*"]
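With the test dependencies now published as a standard optional extra rather than a Poetry-only group, they should be installable straight from the registry, e.g. pip install "rapyer[test]".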
{rapyer-1.1.3 → rapyer-1.1.5}/rapyer/__init__.py
@@ -1,6 +1,13 @@
  """Redis Pydantic - Pydantic models with Redis as the backend."""

- from rapyer.base import AtomicRedisModel, aget, find_redis_models, ainsert
+ from rapyer.base import (
+ AtomicRedisModel,
+ aget,
+ find_redis_models,
+ ainsert,
+ get,
+ alock_from_key,
+ )
  from rapyer.init import init_rapyer, teardown_rapyer

  __all__ = [
@@ -11,4 +18,5 @@ __all__ = [
  "get",
  "find_redis_models",
  "ainsert",
+ "alock_from_key",
  ]
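The newly exported module-level alock_from_key (its implementation appears at the end of the rapyer/base.py diff below) takes a per-key lock, yields the loaded model or None if the key no longer exists, and optionally re-saves it on exit. A minimal usage sketch, assuming a hypothetical User model registered with rapyer and the ClassName:pk key convention that aget relies on:

    import asyncio
    import rapyer

    async def main() -> None:
        # Hold the "billing" lock on this key for the duration of the block;
        # save_at_end=True re-saves the model when the block exits.
        async with rapyer.alock_from_key("User:42", action="billing", save_at_end=True) as user:
            if user is None:
                return  # the key was deleted; nothing to update
            ...  # mutate the locked model here

    asyncio.run(main())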
{rapyer-1.1.3 → rapyer-1.1.5}/rapyer/base.py
@@ -2,9 +2,11 @@ import asyncio
  import base64
  import contextlib
  import functools
+ import logging
  import pickle
  import uuid
- from typing import ClassVar, Any, AsyncGenerator
+ from contextlib import AbstractAsyncContextManager
+ from typing import ClassVar, Any, get_origin

  from pydantic import (
  BaseModel,
@@ -15,48 +17,85 @@ from pydantic import (
  field_validator,
  )
  from pydantic_core.core_schema import FieldSerializationInfo, ValidationInfo
+ from redis.commands.search.index_definition import IndexDefinition, IndexType
+ from redis.commands.search.query import Query
+ from redis.exceptions import NoScriptError
+ from typing_extensions import deprecated
+
  from rapyer.config import RedisConfig
  from rapyer.context import _context_var, _context_xx_pipe
- from rapyer.errors.base import KeyNotFound
- from rapyer.fields.expression import ExpressionField
+ from rapyer.errors.base import (
+ KeyNotFound,
+ PersistentNoScriptError,
+ UnsupportedIndexedFieldError,
+ CantSerializeRedisValueError,
+ )
+ from rapyer.fields.expression import ExpressionField, AtomicField, Expression
  from rapyer.fields.index import IndexAnnotation
  from rapyer.fields.key import KeyAnnotation
+ from rapyer.fields.safe_load import SafeLoadAnnotation
  from rapyer.links import REDIS_SUPPORTED_LINK
- from rapyer.types.base import RedisType, REDIS_DUMP_FLAG_NAME
+ from rapyer.scripts import handle_noscript_error
+ from rapyer.types.base import RedisType, REDIS_DUMP_FLAG_NAME, FAILED_FIELDS_KEY
  from rapyer.types.convert import RedisConverter
  from rapyer.typing_support import Self, Unpack
- from rapyer.typing_support import deprecated
  from rapyer.utils.annotation import (
  replace_to_redis_types_in_annotation,
  has_annotation,
  field_with_flag,
  DYNAMIC_CLASS_DOC,
  )
- from rapyer.utils.fields import get_all_pydantic_annotation, is_redis_field
- from rapyer.utils.redis import acquire_lock, update_keys_in_pipeline
- from redis.commands.search.index_definition import IndexDefinition, IndexType
- from redis.commands.search.query import Query
+ from rapyer.utils.fields import (
+ get_all_pydantic_annotation,
+ is_redis_field,
+ is_type_json_serializable,
+ )
+ from rapyer.utils.pythonic import safe_issubclass
+ from rapyer.utils.redis import (
+ acquire_lock,
+ update_keys_in_pipeline,
+ refresh_ttl_if_needed,
+ )
+
+ logger = logging.getLogger("rapyer")


- def make_pickle_field_serializer(field: str):
+ def make_pickle_field_serializer(
+ field: str, safe_load: bool = False, can_json: bool = False
+ ):
  @field_serializer(field, when_used="json-unless-none")
- def pickle_field_serializer(v, info: FieldSerializationInfo):
+ @classmethod
+ def pickle_field_serializer(cls, v, info: FieldSerializationInfo):
  ctx = info.context or {}
  should_serialize_redis = ctx.get(REDIS_DUMP_FLAG_NAME, False)
- if should_serialize_redis:
+ # Skip pickling if field CAN be JSON serialized AND user prefers JSON dump
+ field_can_be_json = can_json and cls.Meta.prefer_normal_json_dump
+ if should_serialize_redis and not field_can_be_json:
  return base64.b64encode(pickle.dumps(v)).decode("utf-8")
  return v

  pickle_field_serializer.__name__ = f"__serialize_{field}"

  @field_validator(field, mode="before")
- def pickle_field_validator(v, info: ValidationInfo):
+ @classmethod
+ def pickle_field_validator(cls, v, info: ValidationInfo):
  if v is None:
  return v
  ctx = info.context or {}
  should_serialize_redis = ctx.get(REDIS_DUMP_FLAG_NAME, False)
  if should_serialize_redis:
- return pickle.loads(base64.b64decode(v))
+ try:
+ field_can_be_json = can_json and cls.Meta.prefer_normal_json_dump
+ if should_serialize_redis and not field_can_be_json:
+ return pickle.loads(base64.b64decode(v))
+ return v
+ except Exception as e:
+ if safe_load:
+ failed_fields = ctx.setdefault(FAILED_FIELDS_KEY, set())
+ failed_fields.add(field)
+ logger.warning("SafeLoad: Failed to deserialize field '%s'", field)
+ return None
+ raise CantSerializeRedisValueError() from e
  return v

  pickle_field_validator.__name__ = f"__deserialize_{field}"
@@ -64,15 +103,40 @@ def make_pickle_field_serializer(field: str):
  return pickle_field_serializer, pickle_field_validator


+ # TODO: Remove in next major version (2.0) - backward compatibility for pickled data
+ # This validator handles loading old pickled data for fields that are now JSON-serializable.
+ # In 2.0, remove this function and the validator registration in __init_subclass__.
+ def make_backward_compat_validator(field: str):
+ @field_validator(field, mode="before")
+ def backward_compat_validator(v, info: ValidationInfo):
+ ctx = info.context or {}
+ should_deserialize_redis = ctx.get(REDIS_DUMP_FLAG_NAME, False)
+ if should_deserialize_redis and isinstance(v, str):
+ try:
+ return pickle.loads(base64.b64decode(v))
+ except Exception:
+ pass
+ return v
+
+ backward_compat_validator.__name__ = f"__backward_compat_{field}"
+ return backward_compat_validator
+
+
  class AtomicRedisModel(BaseModel):
  _pk: str = PrivateAttr(default_factory=lambda: str(uuid.uuid4()))
  _base_model_link: Self | RedisType = PrivateAttr(default=None)
+ _failed_fields: set[str] = PrivateAttr(default_factory=set)

  Meta: ClassVar[RedisConfig] = RedisConfig()
  _key_field_name: ClassVar[str | None] = None
+ _safe_load_fields: ClassVar[set[str]] = set()
  _field_name: str = PrivateAttr(default="")
  model_config = ConfigDict(validate_assignment=True, validate_default=True)

+ @property
+ def failed_fields(self) -> set[str]:
+ return self._failed_fields
+
  @property
  def pk(self):
  if self._key_field_name:
@@ -106,34 +170,33 @@ class AtomicRedisModel(BaseModel):
  return f"${field_path}" if field_path else "$"

  @classmethod
- def redis_schema(cls):
+ def redis_schema(cls, redis_name: str = ""):
  fields = []

  for field_name, field_info in cls.model_fields.items():
  real_type = field_info.annotation
- if not is_redis_field(field_name, real_type):
- continue
-
- if not field_with_flag(field_info, IndexAnnotation):
- continue
-
  # Check if real_type is a class before using issubclass
- if isinstance(real_type, type):
- if issubclass(real_type, AtomicRedisModel):
- sub_fields = real_type.redis_schema()
- for sub_field in sub_fields:
- sub_field.name = f"{field_name}.{sub_field.name}"
- fields.append(sub_field)
- elif issubclass(real_type, RedisType):
- field_schema = real_type.redis_schema(field_name)
- fields.append(field_schema)
- else:
- raise RuntimeError(
- f"Indexed field {field_name} must be redis-supported to be indexed, see {REDIS_SUPPORTED_LINK}"
+ if get_origin(real_type) is not None or not isinstance(real_type, type):
+ if field_with_flag(field_info, IndexAnnotation):
+ raise UnsupportedIndexedFieldError(
+ f"Field {field_name} is type {real_type}, and not supported for indexing"
  )
+ else:
+ continue
+
+ full_redis_name = f"{redis_name}.{field_name}" if redis_name else field_name
+ if issubclass(real_type, AtomicRedisModel):
+ real_type: type[AtomicRedisModel]
+ sub_fields = real_type.redis_schema(full_redis_name)
+ fields.extend(sub_fields)
+ elif not field_with_flag(field_info, IndexAnnotation):
+ continue
+ elif issubclass(real_type, RedisType):
+ field_schema = real_type.redis_schema(full_redis_name)
+ fields.append(field_schema)
  else:
- raise RuntimeError(
- f"Indexed field {field_name} must be a simple redis-supported type, see {REDIS_SUPPORTED_LINK}"
+ raise UnsupportedIndexedFieldError(
+ f"Indexed field {field_name} must be redis-supported to be indexed, see {REDIS_SUPPORTED_LINK}"
  )

  return fields
@@ -178,11 +241,13 @@
  self._pk = value.split(":", maxsplit=1)[-1]

  def __init_subclass__(cls, **kwargs):
- # Find a field with KeyAnnotation and save its name
+ # Find fields with KeyAnnotation and SafeLoadAnnotation
+ cls._safe_load_fields = set()
  for field_name, annotation in cls.__annotations__.items():
  if has_annotation(annotation, KeyAnnotation):
  cls._key_field_name = field_name
- break
+ if has_annotation(annotation, SafeLoadAnnotation):
+ cls._safe_load_fields.add(field_name)

  # Redefine annotations to use redis types
  pydantic_annotation = get_all_pydantic_annotation(cls, AtomicRedisModel)
@@ -194,7 +259,13 @@
  original_annotations.update(new_annotation)
  new_annotations = {
  field_name: replace_to_redis_types_in_annotation(
- annotation, RedisConverter(cls.Meta.redis_type, f".{field_name}")
+ annotation,
+ RedisConverter(
+ cls.Meta.redis_type,
+ f".{field_name}",
+ safe_load=field_name in cls._safe_load_fields
+ or cls.Meta.safe_load_all,
+ ),
  )
  for field_name, annotation in original_annotations.items()
  if is_redis_field(field_name, annotation)
@@ -210,21 +281,47 @@
  if not is_redis_field(attr_name, attr_type):
  continue
  if original_annotations[attr_name] == attr_type:
- serializer, validator = make_pickle_field_serializer(attr_name)
- setattr(cls, serializer.__name__, serializer)
- setattr(cls, validator.__name__, validator)
+ default_value = cls.__dict__.get(attr_name, None)
+ can_json = is_type_json_serializable(attr_type, default_value)
+ should_json_serialize = can_json and cls.Meta.prefer_normal_json_dump
+
+ if not should_json_serialize:
+ is_field_marked_safe = attr_name in cls._safe_load_fields
+ is_safe_load = is_field_marked_safe or cls.Meta.safe_load_all
+ serializer, validator = make_pickle_field_serializer(
+ attr_name, safe_load=is_safe_load, can_json=can_json
+ )
+ setattr(cls, serializer.__name__, serializer)
+ setattr(cls, validator.__name__, validator)
+ else:
+ # TODO: Remove in 2.0 - backward compatibility for old pickled data
+ validator = make_backward_compat_validator(attr_name)
+ setattr(cls, validator.__name__, validator)
  continue

  # Update the redis model list for initialization
  # Skip dynamically created classes from type conversion
- if cls.__doc__ != DYNAMIC_CLASS_DOC:
+ if cls.__doc__ != DYNAMIC_CLASS_DOC and cls.Meta.init_with_rapyer:
  REDIS_MODELS.append(cls)

  @classmethod
- def init_class(cls):
+ def create_expressions(cls, base_path: str = "") -> dict[str, Expression]:
+ expressions = {}
  for field_name, field_info in cls.model_fields.items():
+ full_field_name = rf"{base_path}\.{field_name}" if base_path else field_name
  field_type = field_info.annotation
- setattr(cls, field_name, ExpressionField(field_name, field_type))
+ if safe_issubclass(field_type, AtomicRedisModel):
+ expressions[field_name] = AtomicField(
+ field_name, **field_type.create_expressions(full_field_name)
+ )
+ else:
+ expressions[field_name] = ExpressionField(full_field_name, field_type)
+ return expressions
+
+ @classmethod
+ def init_class(cls):
+ for field_name, field_expression in cls.create_expressions().items():
+ setattr(cls, field_name, field_expression)

  def is_inner_model(self) -> bool:
  return bool(self.field_name)
@@ -233,13 +330,14 @@
  f"save function is deprecated and will become sync function in rapyer 1.2.0, use asave() instead"
  )
  async def save(self):
- return await self.asave()
+ return await self.asave() # pragma: no cover

  async def asave(self) -> Self:
  model_dump = self.redis_dump()
  await self.Meta.redis.json().set(self.key, self.json_path, model_dump)
  if self.Meta.ttl is not None:
- await self.Meta.redis.expire(self.key, self.Meta.ttl)
+ nx = not self.Meta.refresh_ttl
+ await self.Meta.redis.expire(self.key, self.Meta.ttl, nx=nx)
  return self

  def redis_dump(self):
@@ -252,7 +350,7 @@
  "duplicate function is deprecated and will be removed in rapyer 1.2.0, use aduplicate instead"
  )
  async def duplicate(self) -> Self:
- return await self.aduplicate()
+ return await self.aduplicate() # pragma: no cover

  async def aduplicate(self) -> Self:
  if self.is_inner_model():
@@ -266,7 +364,7 @@
  "duplicate_many function is deprecated and will be removed in rapyer 1.2.0, use aduplicate_many instead"
  )
  async def duplicate_many(self, num: int) -> list[Self]:
- return await self.aduplicate_many(num)
+ return await self.aduplicate_many(num) # pragma: no cover

  async def aduplicate_many(self, num: int) -> list[Self]:
  if self.is_inner_model():
@@ -297,13 +395,16 @@
  async with self.Meta.redis.pipeline() as pipe:
  update_keys_in_pipeline(pipe, self.key, **json_path_kwargs)
  await pipe.execute()
+ await refresh_ttl_if_needed(
+ self.Meta.redis, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )

  @classmethod
  @deprecated(
  "get() classmethod is deprecated and will be removed in rapyer 1.2.0, use aget instead"
  )
  async def get(cls, key: str) -> Self:
- return await cls.aget(key)
+ return await cls.aget(key) # pragma: no cover

  @classmethod
  async def aget(cls, key: str) -> Self:
@@ -314,24 +415,34 @@
  raise KeyNotFound(f"{key} is missing in redis")
  model_dump = model_dump[0]

- instance = cls.model_validate(model_dump, context={REDIS_DUMP_FLAG_NAME: True})
+ context = {REDIS_DUMP_FLAG_NAME: True, FAILED_FIELDS_KEY: set()}
+ instance = cls.model_validate(model_dump, context=context)
  instance.key = key
+ instance._failed_fields = context.get(FAILED_FIELDS_KEY, set())
+ await refresh_ttl_if_needed(
+ cls.Meta.redis, key, cls.Meta.ttl, cls.Meta.refresh_ttl
+ )
  return instance

  @deprecated(
  "load function is deprecated and will be removed in rapyer 1.2.0, use aload() instead"
  )
  async def load(self):
- return await self.aload()
+ return await self.aload() # pragma: no cover

  async def aload(self) -> Self:
  model_dump = await self.Meta.redis.json().get(self.key, self.json_path)
  if not model_dump:
  raise KeyNotFound(f"{self.key} is missing in redis")
  model_dump = model_dump[0]
- instance = self.__class__(**model_dump)
+ context = {REDIS_DUMP_FLAG_NAME: True, FAILED_FIELDS_KEY: set()}
+ instance = self.__class__.model_validate(model_dump, context=context)
  instance._pk = self._pk
  instance._base_model_link = self._base_model_link
+ instance._failed_fields = context.get(FAILED_FIELDS_KEY, set())
+ await refresh_ttl_if_needed(
+ self.Meta.redis, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
  return instance

  @classmethod
@@ -365,10 +476,18 @@
  # Fetch the actual documents
  models = await cls.Meta.redis.json().mget(keys=keys, path="$")

+ if cls.Meta.ttl is not None and cls.Meta.refresh_ttl:
+ async with cls.Meta.redis.pipeline() as pipe:
+ for key in keys:
+ pipe.expire(key, cls.Meta.ttl)
+ await pipe.execute()
+
  instances = []
  for model, key in zip(models, keys):
- model = cls.model_validate(model[0], context={REDIS_DUMP_FLAG_NAME: True})
+ context = {REDIS_DUMP_FLAG_NAME: True, FAILED_FIELDS_KEY: set()}
+ model = cls.model_validate(model[0], context=context)
  model.key = key
+ model._failed_fields = context.get(FAILED_FIELDS_KEY, set())
  instances.append(model)
  return instances

@@ -381,6 +500,8 @@
  async with cls.Meta.redis.pipeline() as pipe:
  for model in models:
  pipe.json().set(model.key, model.json_path, model.redis_dump())
+ if cls.Meta.ttl is not None:
+ pipe.expire(model.key, cls.Meta.ttl)
  await pipe.execute()

  @classmethod
@@ -388,7 +509,7 @@
  "function delete is deprecated and will be removed in rapyer 1.2.0, use adelete instead"
  )
  async def delete_by_key(cls, key: str) -> bool:
- return await cls.adelete_by_key(key)
+ return await cls.adelete_by_key(key) # pragma: no cover

  @classmethod
  async def adelete_by_key(cls, key: str) -> bool:
@@ -399,7 +520,7 @@
  "function delete is deprecated and will be removed in rapyer 1.2.0, use adelete instead"
  )
  async def delete(self):
- return await self.adelete()
+ return await self.adelete() # pragma: no cover

  async def adelete(self):
  if self.is_inner_model():
@@ -419,15 +540,17 @@
  )
  async def lock_from_key(
  cls, key: str, action: str = "default", save_at_end: bool = False
- ) -> AsyncGenerator[Self, None]:
- async with cls.alock_from_key(key, action, save_at_end) as redis_model:
- yield redis_model
+ ) -> AbstractAsyncContextManager[Self]:
+ async with cls.alock_from_key( # pragma: no cover
+ key, action, save_at_end # pragma: no cover
+ ) as redis_model: # pragma: no cover
+ yield redis_model # pragma: no cover

  @classmethod
  @contextlib.asynccontextmanager
  async def alock_from_key(
  cls, key: str, action: str = "default", save_at_end: bool = False
- ) -> AsyncGenerator[Self, None]:
+ ) -> AbstractAsyncContextManager[Self]:
  async with acquire_lock(cls.Meta.redis, f"{key}/{action}"):
  redis_model = await cls.aget(key)
  yield redis_model
@@ -440,14 +563,16 @@
  )
  async def lock(
  self, action: str = "default", save_at_end: bool = False
- ) -> AsyncGenerator[Self, None]:
- async with self.alock_from_key(self.key, action, save_at_end) as redis_model:
- yield redis_model
+ ) -> AbstractAsyncContextManager[Self]:
+ async with self.alock_from_key( # pragma: no cover
+ self.key, action, save_at_end # pragma: no cover
+ ) as redis_model: # pragma: no cover
+ yield redis_model # pragma: no cover

  @contextlib.asynccontextmanager
  async def alock(
  self, action: str = "default", save_at_end: bool = False
- ) -> AsyncGenerator[Self, None]:
+ ) -> AbstractAsyncContextManager[Self]:
  async with self.alock_from_key(self.key, action, save_at_end) as redis_model:
  unset_fields = {
  k: redis_model.__dict__[k] for k in redis_model.model_fields_set
@@ -461,22 +586,24 @@
  )
  async def pipeline(
  self, ignore_if_deleted: bool = False
- ) -> AsyncGenerator[Self, None]:
- async with self.apipeline(ignore_if_deleted=ignore_if_deleted) as redis_model:
- yield redis_model
+ ) -> AbstractAsyncContextManager[Self]:
+ async with self.apipeline( # pragma: no cover
+ ignore_if_deleted=ignore_if_deleted # pragma: no cover
+ ) as redis_model: # pragma: no cover
+ yield redis_model # pragma: no cover

  @contextlib.asynccontextmanager
  async def apipeline(
  self, ignore_if_deleted: bool = False
- ) -> AsyncGenerator[Self, None]:
- async with self.Meta.redis.pipeline() as pipe:
+ ) -> AbstractAsyncContextManager[Self]:
+ async with self.Meta.redis.pipeline(transaction=True) as pipe:
  try:
  redis_model = await self.__class__.aget(self.key)
  unset_fields = {
  k: redis_model.__dict__[k] for k in redis_model.model_fields_set
  }
  self.__dict__.update(unset_fields)
- except (TypeError, IndexError):
+ except (TypeError, KeyNotFound):
  if ignore_if_deleted:
  redis_model = self
  else:
@@ -484,7 +611,40 @@
  _context_var.set(pipe)
  _context_xx_pipe.set(ignore_if_deleted)
  yield redis_model
- await pipe.execute()
+ commands_backup = list(pipe.command_stack)
+ noscript_on_first_attempt = False
+ noscript_on_retry = False
+
+ try:
+ await pipe.execute()
+ except NoScriptError:
+ noscript_on_first_attempt = True
+
+ if noscript_on_first_attempt:
+ await handle_noscript_error(self.Meta.redis)
+ evalsha_commands = [
+ (args, options)
+ for args, options in commands_backup
+ if args[0] == "EVALSHA"
+ ]
+ # Retry execute the pipeline actions
+ async with self.Meta.redis.pipeline(transaction=True) as retry_pipe:
+ for args, options in evalsha_commands:
+ retry_pipe.execute_command(*args, **options)
+ try:
+ await retry_pipe.execute()
+ except NoScriptError:
+ noscript_on_retry = True
+
+ if noscript_on_retry:
+ raise PersistentNoScriptError(
+ "NOSCRIPT error persisted after re-registering scripts. "
+ "This indicates a server-side problem with Redis."
+ )
+
+ await refresh_ttl_if_needed(
+ self.Meta.redis, self.key, self.Meta.ttl, self.Meta.refresh_ttl
+ )
  _context_var.set(None)
  _context_xx_pipe.set(False)

@@ -530,13 +690,15 @@ REDIS_MODELS: list[type[AtomicRedisModel]] = []
  "get function is deprecated and will be removed in rapyer 1.2.0, use aget instead"
  )
  async def get(redis_key: str) -> AtomicRedisModel:
- return await aget(redis_key)
+ return await aget(redis_key) # pragma: no cover


  async def aget(redis_key: str) -> AtomicRedisModel:
  redis_model_mapping = {klass.__name__: klass for klass in REDIS_MODELS}
  class_name = redis_key.split(":")[0]
  klass = redis_model_mapping.get(class_name)
+ if klass is None:
+ raise KeyNotFound(f"{redis_key} is missing in redis")
  return await klass.aget(redis_key)


@@ -550,3 +712,17 @@ async def ainsert(*models: Unpack[AtomicRedisModel]) -> list[AtomicRedisModel]:
  pipe.json().set(model.key, model.json_path, model.redis_dump())
  await pipe.execute()
  return models
+
+
+ @contextlib.asynccontextmanager
+ async def alock_from_key(
+ key: str, action: str = "default", save_at_end: bool = False
+ ) -> AbstractAsyncContextManager[AtomicRedisModel | None]:
+ async with acquire_lock(AtomicRedisModel.Meta.redis, f"{key}/{action}"):
+ try:
+ redis_model = await aget(key)
+ except KeyNotFound:
+ redis_model = None
+ yield redis_model
+ if save_at_end and redis_model is not None:
+ await redis_model.asave()
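The SafeLoad additions above let a model swallow per-field deserialization failures instead of raising CantSerializeRedisValueError: failing fields come back as None and their names are collected into the new failed_fields property. A rough sketch of how a model might opt in, assuming SafeLoadAnnotation is attached as Annotated metadata (the full field API lives in rapyer/fields/safe_load.py, which this diff only lists):

    from typing import Annotated, Any

    from rapyer import AtomicRedisModel
    from rapyer.fields.safe_load import SafeLoadAnnotation  # new in 1.1.5

    class Job(AtomicRedisModel):
        name: str = ""
        # Hypothetical opt-in: tolerate unreadable pickled payloads on load.
        payload: Annotated[Any, SafeLoadAnnotation()] = None

    async def inspect(key: str) -> None:
        job = await Job.aget(key)
        # Fields that failed to unpickle arrive as None and are reported here
        # instead of aborting the whole aget() call.
        if job.failed_fields:
            print("could not restore:", job.failed_fields)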
{rapyer-1.1.3 → rapyer-1.1.5}/rapyer/config.py
@@ -21,3 +21,10 @@ class RedisConfig:
  )
  redis_type: dict[type, type] = dataclasses.field(default_factory=create_all_types)
  ttl: int | None = None
+ init_with_rapyer: bool = True
+ # Enable TTL refresh on read/write operations by default
+ refresh_ttl: bool = True
+ # If True, all non-Redis-supported fields are treated as SafeLoad
+ safe_load_all: bool = False
+ # If True, use JSON serialization for fields that support it instead of pickle
+ prefer_normal_json_dump: bool = False
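The new RedisConfig options can be combined per model through the existing Meta attribute. A sketch under the assumption that subclasses override Meta with their own RedisConfig instance (as the ClassVar default in base.py suggests) and that the dataclass accepts these fields as keyword arguments; connection setup via init_rapyer is omitted:

    from rapyer import AtomicRedisModel
    from rapyer.config import RedisConfig

    class Session(AtomicRedisModel):
        token: str = ""

        # Hypothetical configuration exercising the 1.1.5 flags:
        Meta = RedisConfig(
            ttl=3600,                      # expire sessions after an hour
            refresh_ttl=True,              # sliding expiry: reads/writes re-arm the TTL
            safe_load_all=True,            # treat every pickled field as SafeLoad
            prefer_normal_json_dump=True,  # store JSON-able fields as plain JSON, not pickle
            init_with_rapyer=True,         # keep registering the class in REDIS_MODELS
        )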