rapyer 1.1.7__tar.gz → 1.2.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {rapyer-1.1.7 → rapyer-1.2.1}/PKG-INFO +1 -1
- {rapyer-1.1.7 → rapyer-1.2.1}/pyproject.toml +1 -1
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/__init__.py +2 -2
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/base.py +79 -172
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/context.py +0 -3
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/__init__.py +6 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/constants.py +5 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/loader.py +24 -3
- rapyer-1.2.1/rapyer/scripts/lua/dict/pop.lua +13 -0
- rapyer-1.2.1/rapyer/scripts/lua/dict/popitem.lua +29 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/registry.py +44 -7
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/types/base.py +0 -13
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/types/dct.py +24 -83
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/types/integer.py +0 -7
- rapyer-1.2.1/rapyer/utils/__init__.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/README.md +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/config.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/errors/__init__.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/errors/base.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/fields/__init__.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/fields/expression.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/fields/index.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/fields/key.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/fields/safe_load.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/init.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/links.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/lua/__init__.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/lua/datetime/__init__.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/lua/datetime/add.lua +0 -0
- {rapyer-1.1.7/rapyer/scripts/lua/list → rapyer-1.2.1/rapyer/scripts/lua/dict}/__init__.py +0 -0
- {rapyer-1.1.7/rapyer/scripts/lua/numeric → rapyer-1.2.1/rapyer/scripts/lua/list}/__init__.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/lua/list/remove_range.lua +0 -0
- {rapyer-1.1.7/rapyer/scripts/lua/string → rapyer-1.2.1/rapyer/scripts/lua/numeric}/__init__.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/lua/numeric/floordiv.lua +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/lua/numeric/mod.lua +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/lua/numeric/mul.lua +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/lua/numeric/pow.lua +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/lua/numeric/pow_float.lua +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/lua/numeric/truediv.lua +0 -0
- {rapyer-1.1.7/rapyer/utils → rapyer-1.2.1/rapyer/scripts/lua/string}/__init__.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/lua/string/append.lua +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/lua/string/mul.lua +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/types/__init__.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/types/byte.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/types/convert.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/types/datetime.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/types/float.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/types/init.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/types/lst.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/types/string.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/typing_support.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/utils/annotation.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/utils/fields.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/utils/pythonic.py +0 -0
- {rapyer-1.1.7 → rapyer-1.2.1}/rapyer/utils/redis.py +0 -0
{rapyer-1.1.7 → rapyer-1.2.1}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: rapyer
-Version: 1.1.7
+Version: 1.2.1
 Summary: Pydantic models with Redis as the backend
 License: MIT
 Keywords: redis,redis-json,pydantic,pydantic-v2,orm,database,async,nosql,cache,key-value,data-modeling,python,backend,storage,serialization,validation
{rapyer-1.1.7 → rapyer-1.2.1}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

 [project]
 name = "rapyer"
-version = "1.1.7"
+version = "1.2.1"
 description = "Pydantic models with Redis as the backend"
 authors = [{name = "YedidyaHKfir", email = "yedidyakfir@gmail.com"}]
 readme = "README.md"
{rapyer-1.1.7 → rapyer-1.2.1}/rapyer/__init__.py

@@ -6,8 +6,8 @@ from rapyer.base import (
     afind,
     find_redis_models,
     ainsert,
-    get,
     alock_from_key,
+    apipeline,
 )
 from rapyer.init import init_rapyer, teardown_rapyer


@@ -17,8 +17,8 @@ __all__ = [
     "teardown_rapyer",
     "aget",
     "afind",
-    "get",
     "find_redis_models",
     "ainsert",
     "alock_from_key",
+    "apipeline",
 ]
{rapyer-1.1.7 → rapyer-1.2.1}/rapyer/base.py

@@ -18,13 +18,10 @@ from pydantic import (
     ValidationError,
 )
 from pydantic_core.core_schema import FieldSerializationInfo, ValidationInfo
-from redis.commands.search.index_definition import IndexDefinition, IndexType
-from redis.commands.search.query import Query
-from redis.exceptions import NoScriptError, ResponseError
-from typing_extensions import deprecated
+from redis.client import Pipeline

 from rapyer.config import RedisConfig
-from rapyer.context import _context_var, _context_xx_pipe
+from rapyer.context import _context_var
 from rapyer.errors.base import (
     KeyNotFound,
     PersistentNoScriptError,

@@ -37,7 +34,7 @@ from rapyer.fields.index import IndexAnnotation
 from rapyer.fields.key import KeyAnnotation
 from rapyer.fields.safe_load import SafeLoadAnnotation
 from rapyer.links import REDIS_SUPPORTED_LINK
-from rapyer.scripts import
+from rapyer.scripts import registry as scripts_registry
 from rapyer.types.base import RedisType, REDIS_DUMP_FLAG_NAME, FAILED_FIELDS_KEY
 from rapyer.types.convert import RedisConverter
 from rapyer.typing_support import Self, Unpack
@@ -57,6 +54,9 @@ from rapyer.utils.redis import (
     acquire_lock,
     update_keys_in_pipeline,
 )
+from redis.commands.search.index_definition import IndexDefinition, IndexType
+from redis.commands.search.query import Query
+from redis.exceptions import NoScriptError, ResponseError

 logger = logging.getLogger("rapyer")

@@ -104,25 +104,6 @@ def make_pickle_field_serializer(
     return pickle_field_serializer, pickle_field_validator


-# TODO: Remove in next major version (2.0) - backward compatibility for pickled data
-# This validator handles loading old pickled data for fields that are now JSON-serializable.
-# In 2.0, remove this function and the validator registration in __init_subclass__.
-def make_backward_compat_validator(field: str):
-    @field_validator(field, mode="before")
-    def backward_compat_validator(v, info: ValidationInfo):
-        ctx = info.context or {}
-        should_deserialize_redis = ctx.get(REDIS_DUMP_FLAG_NAME, False)
-        if should_deserialize_redis and isinstance(v, str):
-            try:
-                return pickle.loads(base64.b64decode(v))
-            except Exception:
-                pass
-        return v
-
-    backward_compat_validator.__name__ = f"__backward_compat_{field}"
-    return backward_compat_validator
-
-
 class AtomicRedisModel(BaseModel):
     _pk: str = PrivateAttr(default_factory=lambda: str(uuid.uuid4()))
     _base_model_link: Self | RedisType = PrivateAttr(default=None)
@@ -170,6 +151,10 @@ class AtomicRedisModel(BaseModel):
         field_path = self.field_path
         return f"${field_path}" if field_path else "$"

+    @property
+    def client(self):
+        return _context_var.get() or self.Meta.redis
+
     @classmethod
     def should_refresh(cls):
         return cls.Meta.refresh_ttl and cls.Meta.ttl is not None
@@ -302,11 +287,6 @@ class AtomicRedisModel(BaseModel):
             )
             setattr(cls, serializer.__name__, serializer)
             setattr(cls, validator.__name__, validator)
-        else:
-            # TODO: Remove in 2.0 - backward compatibility for old pickled data
-            validator = make_backward_compat_validator(attr_name)
-            setattr(cls, validator.__name__, validator)
-            continue

         # Update the redis model list for initialization
         # Skip dynamically created classes from type conversion
@@ -335,18 +315,12 @@ class AtomicRedisModel(BaseModel):
     def is_inner_model(self) -> bool:
        return bool(self.field_name)

-    @deprecated(
-        f"save function is deprecated and will become sync function in rapyer 1.2.0, use asave() instead"
-    )
-    async def save(self):
-        return await self.asave()  # pragma: no cover
-
     async def asave(self) -> Self:
         model_dump = self.redis_dump()
-        await self.Meta.redis.json().set(self.key, self.json_path, model_dump)
+        await self.client.json().set(self.key, self.json_path, model_dump)
         if self.Meta.ttl is not None:
             nx = not self.Meta.refresh_ttl
-            await self.Meta.redis.expire(self.key, self.Meta.ttl, nx=nx)
+            await self.client.expire(self.key, self.Meta.ttl, nx=nx)
         return self

     def redis_dump(self):
@@ -355,12 +329,6 @@ class AtomicRedisModel(BaseModel):
     def redis_dump_json(self):
         return self.model_dump_json(context={REDIS_DUMP_FLAG_NAME: True})

-    @deprecated(
-        "duplicate function is deprecated and will be removed in rapyer 1.2.0, use aduplicate instead"
-    )
-    async def duplicate(self) -> Self:
-        return await self.aduplicate()  # pragma: no cover
-
     async def aduplicate(self) -> Self:
         if self.is_inner_model():
             raise RuntimeError("Can only duplicate from top level model")

@@ -369,12 +337,6 @@ class AtomicRedisModel(BaseModel):
         await duplicated.asave()
         return duplicated

-    @deprecated(
-        "duplicate_many function is deprecated and will be removed in rapyer 1.2.0, use aduplicate_many instead"
-    )
-    async def duplicate_many(self, num: int) -> list[Self]:
-        return await self.aduplicate_many(num)  # pragma: no cover
-
     async def aduplicate_many(self, num: int) -> list[Self]:
         if self.is_inner_model():
             raise RuntimeError("Can only duplicate from top level model")
@@ -401,7 +363,7 @@ class AtomicRedisModel(BaseModel):
             for field_name in kwargs.keys()
         }

-        async with self.Meta.redis.pipeline() as pipe:
+        async with self.Meta.redis.pipeline(transaction=True) as pipe:
             update_keys_in_pipeline(pipe, self.key, **json_path_kwargs)
             await pipe.execute()
         await self.refresh_ttl_if_needed()
@@ -409,14 +371,11 @@ class AtomicRedisModel(BaseModel):
     async def aset_ttl(self, ttl: int) -> None:
         if self.is_inner_model():
             raise RuntimeError("Can only set TTL from top level model")
-        await self.Meta.redis.expire(self.key, ttl)
-
-    @classmethod
-    @deprecated(
-        "get function is deprecated and will be removed in rapyer 1.2.0, use aget instead"
-    )
-    async def get(cls, key: str) -> Self:
-        return await cls.aget(key)  # pragma: no cover
+        pipeline = _context_var.get()
+        if pipeline is not None:
+            pipeline.expire(self.key, ttl)
+        else:
+            await self.Meta.redis.expire(self.key, ttl)

     @classmethod
     async def aget(cls, key: str) -> Self:
@@ -435,12 +394,6 @@ class AtomicRedisModel(BaseModel):
             await cls.Meta.redis.expire(key, cls.Meta.ttl)
         return instance

-    @deprecated(
-        "load function is deprecated and will be removed in rapyer 1.2.0, use aload() instead"
-    )
-    async def load(self):
-        return await self.aload()  # pragma: no cover
-
     async def aload(self) -> Self:
         model_dump = await self.Meta.redis.json().get(self.key, self.json_path)
         if not model_dump:
@@ -543,24 +496,11 @@ class AtomicRedisModel(BaseModel):
                 pipe.expire(model.key, cls.Meta.ttl)
             await pipe.execute()

-    @classmethod
-    @deprecated(
-        "function delete is deprecated and will be removed in rapyer 1.2.0, use adelete instead"
-    )
-    async def delete_by_key(cls, key: str) -> bool:
-        return await cls.adelete_by_key(key)  # pragma: no cover
-
     @classmethod
     async def adelete_by_key(cls, key: str) -> bool:
         client = _context_var.get() or cls.Meta.redis
         return await client.delete(key) == 1

-    @deprecated(
-        "function delete is deprecated and will be removed in rapyer 1.2.0, use adelete instead"
-    )
-    async def delete(self):
-        return await self.adelete()  # pragma: no cover
-
     async def adelete(self):
         if self.is_inner_model():
             raise RuntimeError("Can only delete from inner model")
@@ -572,19 +512,6 @@ class AtomicRedisModel(BaseModel):
             *[model if isinstance(model, str) else model.key for model in args]
         )

-    @classmethod
-    @contextlib.asynccontextmanager
-    @deprecated(
-        "lock_from_key function is deprecated and will be removed in rapyer 1.2.0, use alock_from_key instead"
-    )
-    async def lock_from_key(
-        cls, key: str, action: str = "default", save_at_end: bool = False
-    ) -> AbstractAsyncContextManager[Self]:
-        async with cls.alock_from_key(  # pragma: no cover
-            key, action, save_at_end  # pragma: no cover
-        ) as redis_model:  # pragma: no cover
-            yield redis_model  # pragma: no cover
-
     @classmethod
     @contextlib.asynccontextmanager
     async def alock_from_key(
@@ -596,18 +523,6 @@ class AtomicRedisModel(BaseModel):
             if save_at_end:
                 await redis_model.asave()

-    @contextlib.asynccontextmanager
-    @deprecated(
-        "lock function is deprecated and will be removed in rapyer 1.2.0, use alock instead"
-    )
-    async def lock(
-        self, action: str = "default", save_at_end: bool = False
-    ) -> AbstractAsyncContextManager[Self]:
-        async with self.alock_from_key(  # pragma: no cover
-            self.key, action, save_at_end  # pragma: no cover
-        ) as redis_model:  # pragma: no cover
-            yield redis_model  # pragma: no cover
-
     @contextlib.asynccontextmanager
     async def alock(
         self, action: str = "default", save_at_end: bool = False
@@ -619,23 +534,13 @@ class AtomicRedisModel(BaseModel):
             self.__dict__.update(unset_fields)
             yield redis_model

-    @contextlib.asynccontextmanager
-    @deprecated(
-        "pipeline function is deprecated and will be removed in rapyer 1.2.0, use apipeline instead"
-    )
-    async def pipeline(
-        self, ignore_if_deleted: bool = False
-    ) -> AbstractAsyncContextManager[Self]:
-        async with self.apipeline(  # pragma: no cover
-            ignore_if_deleted=ignore_if_deleted  # pragma: no cover
-        ) as redis_model:  # pragma: no cover
-            yield redis_model  # pragma: no cover
-
     @contextlib.asynccontextmanager
     async def apipeline(
-        self, ignore_if_deleted: bool = False
+        self, ignore_redis_error: bool = False
     ) -> AbstractAsyncContextManager[Self]:
-        async with
+        async with apipeline(
+            ignore_redis_error=ignore_redis_error, _meta=self.Meta
+        ) as pipe:
             try:
                 redis_model = await self.__class__.aget(self.key)
                 unset_fields = {
@@ -643,61 +548,17 @@ class AtomicRedisModel(BaseModel):
                 }
                 self.__dict__.update(unset_fields)
             except (TypeError, KeyNotFound):
-                if ignore_if_deleted:
+                if ignore_redis_error:
                     redis_model = self
                 else:
                     raise
-            _context_var.set(pipe)
-            _context_xx_pipe.set(ignore_if_deleted)
             yield redis_model
-            commands_backup = list(pipe.command_stack)
-            noscript_on_first_attempt = False
-            noscript_on_retry = False

-            try:
-                if self.should_refresh():
-                    pipe.expire(self.key, self.Meta.ttl)
-                await pipe.execute()
-            except NoScriptError:
-                noscript_on_first_attempt = True
-            except ResponseError as exc:
-                if ignore_if_deleted:
-                    logger.warning(
-                        "Swallowed ResponseError during pipeline.execute() with "
-                        "ignore_if_deleted=True for key %r: %s",
-                        getattr(self, "key", None),
-                        exc,
-                    )
-                else:
-                    raise
-
-            if noscript_on_first_attempt:
-                await handle_noscript_error(self.Meta.redis)
-                evalsha_commands = [
-                    (args, options)
-                    for args, options in commands_backup
-                    if args[0] == "EVALSHA"
-                ]
-                # Retry execute the pipeline actions
-                async with self.Meta.redis.pipeline(transaction=True) as retry_pipe:
-                    for args, options in evalsha_commands:
-                        retry_pipe.execute_command(*args, **options)
-                    try:
-                        await retry_pipe.execute()
-                    except NoScriptError:
-                        noscript_on_retry = True
-
-            if noscript_on_retry:
-                raise PersistentNoScriptError(
-                    "NOSCRIPT error persisted after re-registering scripts. "
-                    "This indicates a server-side problem with Redis."
-                )
-
-            _context_var.set(None)
-            _context_xx_pipe.set(False)
+            if self.should_refresh():
+                pipe.expire(self.key, self.Meta.ttl)

     def __setattr__(self, name: str, value: Any) -> None:
-        if name not in self.model_fields or value is None:
+        if name not in self.__class__.model_fields or value is None:
             super().__setattr__(name, value)
             return

@@ -744,13 +605,6 @@ class AtomicRedisModel(BaseModel):
 REDIS_MODELS: list[type[AtomicRedisModel]] = []


-@deprecated(
-    "get function is deprecated and will be removed in rapyer 1.2.0, use aget instead"
-)
-async def get(redis_key: str) -> AtomicRedisModel:
-    return await aget(redis_key)  # pragma: no cover
-
-
 async def aget(redis_key: str) -> AtomicRedisModel:
     redis_model_mapping = {klass.__name__: klass for klass in REDIS_MODELS}
     class_name = redis_key.split(":")[0]
@@ -832,3 +686,56 @@ async def alock_from_key(
         yield redis_model
         if save_at_end and redis_model is not None:
             await redis_model.asave()
+
+
+@contextlib.asynccontextmanager
+async def apipeline(
+    ignore_redis_error: bool = False, _meta: RedisConfig = None
+) -> AbstractAsyncContextManager[Pipeline]:
+    _meta = _meta or AtomicRedisModel.Meta
+    redis = _meta.redis
+    async with redis.pipeline(transaction=True) as pipe:
+        pipe_prev = _context_var.set(pipe)
+        try:
+            yield pipe
+            commands_backup = list(pipe.command_stack)
+            noscript_on_first_attempt = False
+            noscript_on_retry = False
+
+            try:
+                await pipe.execute()
+            except NoScriptError:
+                noscript_on_first_attempt = True
+            except ResponseError as exc:
+                if ignore_redis_error:
+                    logger.warning(
+                        "Swallowed ResponseError during pipeline.execute() with "
+                        "ignore_redis_error=True: %s",
+                        exc,
+                    )
+                else:
+                    raise
+
+            if noscript_on_first_attempt:
+                await scripts_registry.handle_noscript_error(redis, _meta)
+                evalsha_commands = [
+                    (args, options)
+                    for args, options in commands_backup
+                    if args[0] == "EVALSHA"
+                ]
+                # Retry execute the pipeline actions
+                async with redis.pipeline(transaction=True) as retry_pipe:
+                    for args, options in evalsha_commands:
+                        retry_pipe.execute_command(*args, **options)
+                    try:
+                        await retry_pipe.execute()
+                    except NoScriptError:
+                        noscript_on_retry = True
+
+            if noscript_on_retry:
+                raise PersistentNoScriptError(
+                    "NOSCRIPT error persisted after re-registering scripts. "
+                    "This indicates a server-side problem with Redis."
+                )
+        finally:
+            _context_var.reset(pipe_prev)
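For orientation: the pipeline plumbing that 1.1.7 kept inside `AtomicRedisModel.apipeline` (context-variable handling, the NOSCRIPT retry, the EVALSHA replay) now lives in this module-level `apipeline()` context manager, and the model method wraps it. A minimal usage sketch under stated assumptions — rapyer is already initialized against a Redis instance, and `User` is a hypothetical `AtomicRedisModel` subclass; the model, key, and TTL values are illustrative only:

```python
from rapyer import apipeline
from rapyer.base import AtomicRedisModel


class User(AtomicRedisModel):
    # Hypothetical model; a real project also configures Meta.redis via init_rapyer().
    name: str = ""


async def touch_user(key: str) -> None:
    user = await User.aget(key)

    # Model-level pipeline: commands queued inside the block run as one
    # transaction on exit, with NOSCRIPT recovery handled by apipeline().
    async with user.apipeline(ignore_redis_error=True) as fresh:
        await fresh.aset_ttl(3600)  # expire() is queued on the active pipeline

    # Module-level pipeline added in 1.2.1: yields the raw redis Pipeline object.
    async with apipeline() as pipe:
        pipe.expire(user.key, 3600)
```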
{rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/__init__.py

@@ -1,5 +1,7 @@
 from rapyer.scripts.constants import (
     DATETIME_ADD_SCRIPT_NAME,
+    DICT_POP_SCRIPT_NAME,
+    DICT_POPITEM_SCRIPT_NAME,
     NUM_FLOORDIV_SCRIPT_NAME,
     NUM_MOD_SCRIPT_NAME,
     NUM_MUL_SCRIPT_NAME,

@@ -12,6 +14,7 @@ from rapyer.scripts.constants import (
 )
 from rapyer.scripts.registry import (
     _REGISTERED_SCRIPT_SHAS,
+    arun_sha,
     get_scripts,
     get_scripts_fakeredis,
     handle_noscript_error,

@@ -24,6 +27,8 @@ SCRIPTS_FAKEREDIS = get_scripts_fakeredis()

 __all__ = [
     "DATETIME_ADD_SCRIPT_NAME",
+    "DICT_POP_SCRIPT_NAME",
+    "DICT_POPITEM_SCRIPT_NAME",
     "NUM_FLOORDIV_SCRIPT_NAME",
     "NUM_MOD_SCRIPT_NAME",
     "NUM_MUL_SCRIPT_NAME",

@@ -35,6 +40,7 @@ __all__ = [
     "SCRIPTS_FAKEREDIS",
     "STR_APPEND_SCRIPT_NAME",
     "STR_MUL_SCRIPT_NAME",
+    "arun_sha",
     "handle_noscript_error",
     "register_scripts",
     "run_sha",
{rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/constants.py

@@ -1,3 +1,6 @@
+REDIS_VARIANT = "redis"
+FAKEREDIS_VARIANT = "fakeredis"
+
 REMOVE_RANGE_SCRIPT_NAME = "remove_range"
 NUM_MUL_SCRIPT_NAME = "num_mul"
 NUM_FLOORDIV_SCRIPT_NAME = "num_floordiv"

@@ -8,3 +11,5 @@ NUM_TRUEDIV_SCRIPT_NAME = "num_truediv"
 STR_APPEND_SCRIPT_NAME = "str_append"
 STR_MUL_SCRIPT_NAME = "str_mul"
 DATETIME_ADD_SCRIPT_NAME = "datetime_add"
+DICT_POP_SCRIPT_NAME = "dict_pop"
+DICT_POPITEM_SCRIPT_NAME = "dict_popitem"
{rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/loader.py

@@ -1,19 +1,40 @@
 from functools import lru_cache
 from importlib import resources

+from rapyer.scripts.constants import FAKEREDIS_VARIANT, REDIS_VARIANT

 VARIANTS = {
-    "redis": {
+    REDIS_VARIANT: {
         "EXTRACT_ARRAY": "local arr = cjson.decode(arr_json)[1]",
         "EXTRACT_VALUE": "local value = tonumber(cjson.decode(current_json)[1])",
         "EXTRACT_STR": "local value = cjson.decode(current_json)[1]",
         "EXTRACT_DATETIME": "local value = cjson.decode(current_json)[1]",
+        "DICT_EXTRACT_VALUE": "local extracted = cjson.decode(value)[1]",
+        "DICT_EXTRACT_POPITEM": """local parsed = cjson.decode(value)
+if type(parsed) == 'table' then
+    for _, v in pairs(parsed) do
+        extracted = v
+        break
+    end
+else
+    extracted = parsed
+end""",
     },
-    "fakeredis": {
+    FAKEREDIS_VARIANT: {
         "EXTRACT_ARRAY": "local arr = cjson.decode(arr_json)",
         "EXTRACT_VALUE": "local value = tonumber(cjson.decode(current_json)[1])",
         "EXTRACT_STR": "local value = cjson.decode(current_json)[1]",
         "EXTRACT_DATETIME": "local value = cjson.decode(current_json)[1]",
+        "DICT_EXTRACT_VALUE": "local extracted = cjson.decode(value)[1]",
+        "DICT_EXTRACT_POPITEM": """local parsed = cjson.decode(value)
+if type(parsed) == 'table' then
+    for _, v in pairs(parsed) do
+        extracted = v
+        break
+    end
+else
+    extracted = parsed
+end""",
     },
 }


@@ -25,7 +46,7 @@ def _load_template(category: str, name: str) -> str:
     return resources.files(package).joinpath(filename).read_text()


-def load_script(category: str, name: str, variant: str = "redis") -> str:
+def load_script(category: str, name: str, variant: str = REDIS_VARIANT) -> str:
     template = _load_template(category, name)
     replacements = VARIANTS[variant]
     result = template
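The hunk above stops at `result = template`, so the actual placeholder-substitution loop in `load_script` is not visible here. The Lua templates carry markers such as `--[[DICT_EXTRACT_VALUE]]`, and `VARIANTS[variant]` maps those names to variant-specific snippets, which suggests a simple string replacement. A sketch of that idea, with `substitute_placeholders` being a hypothetical stand-in rather than the package's own helper:

```python
from rapyer.scripts.constants import FAKEREDIS_VARIANT
from rapyer.scripts.loader import load_script

# Real call per the signature in the diff: render the dict/pop template for fakeredis.
pop_lua = load_script("dict", "pop", variant=FAKEREDIS_VARIANT)


def substitute_placeholders(template: str, replacements: dict[str, str]) -> str:
    # Hypothetical stand-in for the unshown part of load_script: swap each
    # --[[NAME]] marker for the variant-specific Lua snippet.
    result = template
    for name, snippet in replacements.items():
        result = result.replace(f"--[[{name}]]", snippet)
    return result
```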
rapyer-1.2.1/rapyer/scripts/lua/dict/pop.lua

@@ -0,0 +1,13 @@
+local key = KEYS[1]
+local path = ARGV[1]
+local target_key = ARGV[2]
+
+local value = redis.call('JSON.GET', key, path .. '.' .. target_key)
+
+if value and value ~= '[]' and value ~= 'null' then
+    redis.call('JSON.DEL', key, path .. '.' .. target_key)
+    --[[DICT_EXTRACT_VALUE]]
+    return extracted
+else
+    return nil
+end
rapyer-1.2.1/rapyer/scripts/lua/dict/popitem.lua

@@ -0,0 +1,29 @@
+local key = KEYS[1]
+local path = ARGV[1]
+
+local keys = redis.call('JSON.OBJKEYS', key, path)
+
+if not keys or #keys == 0 then
+    return nil
+end
+
+if type(keys[1]) == 'table' then
+    keys = keys[1]
+end
+
+if not keys or #keys == 0 then
+    return nil
+end
+
+local first_key = tostring(keys[1])
+local value = redis.call('JSON.GET', key, path .. '.' .. first_key)
+
+if not value then
+    return nil
+end
+
+redis.call('JSON.DEL', key, path .. '.' .. first_key)
+
+local extracted
+--[[DICT_EXTRACT_POPITEM]]
+return {first_key, extracted}
{rapyer-1.1.7 → rapyer-1.2.1}/rapyer/scripts/registry.py

@@ -1,17 +1,27 @@
-from rapyer.errors import ScriptsNotInitializedError
+from typing import TYPE_CHECKING
+
+from rapyer.errors import PersistentNoScriptError, ScriptsNotInitializedError
 from rapyer.scripts.constants import (
     DATETIME_ADD_SCRIPT_NAME,
+    DICT_POP_SCRIPT_NAME,
+    DICT_POPITEM_SCRIPT_NAME,
+    FAKEREDIS_VARIANT,
     NUM_FLOORDIV_SCRIPT_NAME,
     NUM_MOD_SCRIPT_NAME,
     NUM_MUL_SCRIPT_NAME,
     NUM_POW_FLOAT_SCRIPT_NAME,
     NUM_POW_SCRIPT_NAME,
     NUM_TRUEDIV_SCRIPT_NAME,
+    REDIS_VARIANT,
     REMOVE_RANGE_SCRIPT_NAME,
     STR_APPEND_SCRIPT_NAME,
     STR_MUL_SCRIPT_NAME,
 )
 from rapyer.scripts.loader import load_script
+from redis.exceptions import NoScriptError
+
+if TYPE_CHECKING:
+    from rapyer.config import RedisConfig

 SCRIPT_REGISTRY: list[tuple[str, str, str]] = [
     ("list", "remove_range", REMOVE_RANGE_SCRIPT_NAME),

@@ -24,6 +34,8 @@ SCRIPT_REGISTRY: list[tuple[str, str, str]] = [
     ("string", "append", STR_APPEND_SCRIPT_NAME),
     ("string", "mul", STR_MUL_SCRIPT_NAME),
     ("datetime", "add", DATETIME_ADD_SCRIPT_NAME),
+    ("dict", "pop", DICT_POP_SCRIPT_NAME),
+    ("dict", "popitem", DICT_POPITEM_SCRIPT_NAME),
 ]

 _REGISTERED_SCRIPT_SHAS: dict[str, str] = {}
@@ -37,29 +49,54 @@ def _build_scripts(variant: str) -> dict[str, str]:


 def get_scripts() -> dict[str, str]:
-    return _build_scripts("redis")
+    return _build_scripts(REDIS_VARIANT)


 def get_scripts_fakeredis() -> dict[str, str]:
-    return _build_scripts("fakeredis")
+    return _build_scripts(FAKEREDIS_VARIANT)


 async def register_scripts(redis_client, is_fakeredis: bool = False) -> None:
-    variant = "fakeredis" if is_fakeredis else "redis"
+    variant = FAKEREDIS_VARIANT if is_fakeredis else REDIS_VARIANT
     scripts = _build_scripts(variant)
     for name, script_text in scripts.items():
         sha = await redis_client.script_load(script_text)
         _REGISTERED_SCRIPT_SHAS[name] = sha


-def run_sha(pipeline, script_name: str, keys: int, *args):
+def get_script(script_name: str):
     sha = _REGISTERED_SCRIPT_SHAS.get(script_name)
     if sha is None:
         raise ScriptsNotInitializedError(
             f"Script '{script_name}' not loaded. Did you forget to call init_rapyer()?"
         )
+    return sha
+
+
+def run_sha(pipeline, script_name: str, keys: int, *args):
+    sha = get_script(script_name)
     pipeline.evalsha(sha, keys, *args)


-async def handle_noscript_error(redis_client):
-    await register_scripts(redis_client)
+async def arun_sha(
+    client, redis_config: "RedisConfig", script_name: str, keys: int, *args
+):
+    sha = get_script(script_name)
+    try:
+        return await client.evalsha(sha, keys, *args)
+    except NoScriptError:
+        pass
+
+    await handle_noscript_error(client, redis_config)
+    sha = get_script(script_name)
+    try:
+        return await client.evalsha(sha, keys, *args)
+    except NoScriptError as e:
+        raise PersistentNoScriptError(
+            "NOSCRIPT error persisted after re-registering scripts. "
+            "This indicates a server-side problem with Redis."
+        ) from e
+
+
+async def handle_noscript_error(redis_client, redis_config: "RedisConfig"):
+    await register_scripts(redis_client, is_fakeredis=redis_config.is_fake_redis)
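A quick sketch of how the two entry points differ (the client, config, key, and path arguments below are placeholders): `run_sha` queues an EVALSHA on an already-open pipeline, while the new `arun_sha` executes immediately and transparently re-registers the scripts once if Redis answers with NOSCRIPT:

```python
from rapyer.scripts import DICT_POP_SCRIPT_NAME, arun_sha, run_sha


async def pop_field(client, redis_config, key: str, json_path: str, field: str):
    # Immediate execution with automatic NOSCRIPT recovery (one retry, then
    # PersistentNoScriptError).
    return await arun_sha(
        client, redis_config, DICT_POP_SCRIPT_NAME, 1, key, json_path, field
    )


def queue_pop(pipeline, key: str, json_path: str, field: str) -> None:
    # Deferred execution: the EVALSHA is only sent on pipeline.execute().
    run_sha(pipeline, DICT_POP_SCRIPT_NAME, 1, key, json_path, field)
```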
{rapyer-1.1.7 → rapyer-1.2.1}/rapyer/types/base.py

@@ -9,7 +9,6 @@ from pydantic import GetCoreSchemaHandler, TypeAdapter
 from pydantic_core import core_schema
 from pydantic_core.core_schema import ValidationInfo, CoreSchema, SerializationInfo
 from redis.commands.search.field import TextField
-from typing_extensions import deprecated

 from rapyer.context import _context_var
 from rapyer.errors.base import CantSerializeRedisValueError

@@ -74,12 +73,6 @@ class RedisType(ABC):
     def json_field_path(self, field_name: str):
         return f"${self.sub_field_path(field_name)}"

-    @deprecated(
-        f"save function is deprecated and will become sync function in rapyer 1.2.0, use asave() instead"
-    )
-    async def save(self):
-        return await self.asave()  # pragma: no cover
-
     async def asave(self) -> Self:
         model_dump = self._adapter.dump_python(
             self, mode="json", context={REDIS_DUMP_FLAG_NAME: True}

@@ -90,12 +83,6 @@ class RedisType(ABC):
             await self.client.expire(self.key, self.Meta.ttl, nx=nx)
         return self

-    @deprecated(
-        "load function is deprecated and will be removed in rapyer 1.2.0, use aload() instead"
-    )
-    async def load(self):
-        return await self.aload()  # pragma: no cover
-
     async def aload(self):
         redis_value = await self.client.json().get(self.key, self.field_path)
         if redis_value is None:
{rapyer-1.1.7 → rapyer-1.2.1}/rapyer/types/dct.py

@@ -2,6 +2,7 @@ from typing import TypeVar, Generic, get_args, Any, TypeAlias, TYPE_CHECKING

 from pydantic_core import core_schema

+from rapyer.scripts import arun_sha, DICT_POP_SCRIPT_NAME, DICT_POPITEM_SCRIPT_NAME
 from rapyer.types.base import (
     GenericRedisType,
     RedisType,
@@ -12,80 +13,6 @@ from rapyer.utils.redis import update_keys_in_pipeline

 T = TypeVar("T")

-# Redis Lua script for atomic get-and-delete operation
-POP_SCRIPT = """
-local key = KEYS[1]
-local path = ARGV[1]
-local target_key = ARGV[2]
-
--- Get the value from the JSON object
-local value = redis.call('JSON.GET', key, path .. '.' .. target_key)
-
-if value and value ~= '[]' and value ~= 'null' then
-    -- Delete the key from the JSON object
-    redis.call('JSON.DEL', key, path .. '.' .. target_key)
-
-    -- Parse and return the actual value
-    local parsed = cjson.decode(value)
-    return parsed[1] -- Return first element if it's an array
-else
-    return nil
-end
-"""
-
-
-# Redis Lua script for atomic get-arbitrary-key-and-delete operation
-POPITEM_SCRIPT = """
-local key = KEYS[1]
-local path = ARGV[1]
-
--- Get all the keys from the JSON object
-local keys = redis.call('JSON.OBJKEYS', key, path)
-
--- Return nil if no keys exist
-if not keys or #keys == 0 then
-    return nil
-end
-
--- Handle nested arrays - Redis sometimes wraps results
-if type(keys[1]) == 'table' then
-    keys = keys[1]
-end
-
--- Check again after unwrapping
-if not keys or #keys == 0 then
-    return nil
-end
-
-local first_key = tostring(keys[1])
-
--- Get the value for this key
-local value = redis.call('JSON.GET', key, path .. '.' .. first_key)
-
--- Return nil if value doesn't exist
-if not value then
-    return nil
-end
-
--- Delete the key from the JSON object
-redis.call('JSON.DEL', key, path .. '.' .. first_key)
-
--- Parse the JSON string
-local parsed_value = cjson.decode(value)
-
--- If it's a table/object, return the first value
-if type(parsed_value) == 'table' then
-    for _, v in pairs(parsed_value) do
-        return {first_key, v} -- Return first value found
-    end
-    -- If table is empty, return nil
-    return nil
-end
-
--- Otherwise return the parsed value as-is
-return {first_key, parsed_value}
-"""
-

 class RedisDict(dict[str, T], GenericRedisType, Generic[T]):
     original_type = dict
@@ -108,10 +35,13 @@ class RedisDict(dict[str, T], GenericRedisType, Generic[T]):

     def update(self, m=None, /, **kwargs):
         if self.pipeline:
-            m_redis_val = self._adapter.dump_python(
-                m, mode="json", context={REDIS_DUMP_FLAG_NAME: True}
+            m_redis_val = (
+                self._adapter.dump_python(
+                    m, mode="json", context={REDIS_DUMP_FLAG_NAME: True}
+                )
+                if m
+                else {}
             )
-            m_redis_val = m_redis_val or {}
             kwargs_redis_val = self._adapter.dump_python(
                 kwargs, mode="json", context={REDIS_DUMP_FLAG_NAME: True}
             )
@@ -175,14 +105,19 @@ class RedisDict(dict[str, T], GenericRedisType, Generic[T]):
         await self.refresh_ttl_if_needed()

     async def apop(self, key, default=None):
-
-
-
+        result = await arun_sha(
+            self.client,
+            self.Meta,
+            DICT_POP_SCRIPT_NAME,
+            1,
+            self.key,
+            self.json_path,
+            key,
+        )
         super().pop(key, None)
         await self.refresh_ttl_if_needed()

         if result is None:
-            # Key doesn't exist in Redis
             return default

         return self._adapter.validate_python(

@@ -190,8 +125,14 @@ class RedisDict(dict[str, T], GenericRedisType, Generic[T]):
         )[key]

     async def apopitem(self):
-
-
+        result = await arun_sha(
+            self.client,
+            self.Meta,
+            DICT_POPITEM_SCRIPT_NAME,
+            1,
+            self.key,
+            self.json_path,
+        )
         await self.refresh_ttl_if_needed()

         if result is not None:
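The rewritten `apop`/`apopitem` above now run the registered `dict_pop`/`dict_popitem` scripts through `arun_sha` instead of the inline `eval` scripts removed earlier in this file. A hypothetical usage sketch (the `Cart` model and its `items` field exist only for illustration):

```python
from rapyer.base import AtomicRedisModel
from rapyer.types.dct import RedisDict


class Cart(AtomicRedisModel):
    # Hypothetical model: `items` maps SKU strings to quantities.
    items: RedisDict[int] = {}


async def take_items(cart_key: str) -> None:
    cart = await Cart.aget(cart_key)

    # Atomic JSON.GET + JSON.DEL of one key via the registered dict_pop script.
    qty = await cart.items.apop("sku-123", default=0)

    # Atomic removal of an arbitrary key via the registered dict_popitem script.
    leftover = await cart.items.apopitem()
    print(qty, leftover)
```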
{rapyer-1.1.7 → rapyer-1.2.1}/rapyer/types/integer.py

@@ -1,7 +1,6 @@
 from typing import TypeAlias, TYPE_CHECKING

 from redis.commands.search.field import NumericField
-from typing_extensions import deprecated

 from rapyer.scripts import (
     run_sha,

@@ -20,12 +19,6 @@ class RedisInt(int, RedisType):
     def redis_schema(cls, field_name: str):
         return NumericField(f"$.{field_name}", as_name=field_name)

-    @deprecated(
-        f"increase function is deprecated and will become sync function in rapyer 1.2.0, use aincrease() instead"
-    )
-    async def increase(self, amount: int = 1):
-        return await self.aincrease(amount)
-
     async def aincrease(self, amount: int = 1):
         result = await self.client.json().numincrby(self.key, self.json_path, amount)
         await self.refresh_ttl_if_needed()