coredis-5.5.0-cp313-cp313-macosx_11_0_arm64.whl
This diff reflects the changes between publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.
- 22fe76227e35f92ab5c3__mypyc.cpython-313-darwin.so +0 -0
- coredis/__init__.py +42 -0
- coredis/_enum.py +42 -0
- coredis/_json.py +11 -0
- coredis/_packer.cpython-313-darwin.so +0 -0
- coredis/_packer.py +71 -0
- coredis/_protocols.py +50 -0
- coredis/_py_311_typing.py +20 -0
- coredis/_py_312_typing.py +17 -0
- coredis/_sidecar.py +114 -0
- coredis/_utils.cpython-313-darwin.so +0 -0
- coredis/_utils.py +440 -0
- coredis/_version.py +34 -0
- coredis/_version.pyi +1 -0
- coredis/cache.py +801 -0
- coredis/client/__init__.py +6 -0
- coredis/client/basic.py +1240 -0
- coredis/client/cluster.py +1265 -0
- coredis/commands/__init__.py +64 -0
- coredis/commands/_key_spec.py +517 -0
- coredis/commands/_utils.py +108 -0
- coredis/commands/_validators.py +159 -0
- coredis/commands/_wrappers.py +175 -0
- coredis/commands/bitfield.py +110 -0
- coredis/commands/constants.py +662 -0
- coredis/commands/core.py +8484 -0
- coredis/commands/function.py +408 -0
- coredis/commands/monitor.py +168 -0
- coredis/commands/pubsub.py +905 -0
- coredis/commands/request.py +108 -0
- coredis/commands/script.py +296 -0
- coredis/commands/sentinel.py +246 -0
- coredis/config.py +50 -0
- coredis/connection.py +906 -0
- coredis/constants.cpython-313-darwin.so +0 -0
- coredis/constants.py +37 -0
- coredis/credentials.py +45 -0
- coredis/exceptions.py +360 -0
- coredis/experimental/__init__.py +1 -0
- coredis/globals.py +23 -0
- coredis/modules/__init__.py +121 -0
- coredis/modules/autocomplete.py +138 -0
- coredis/modules/base.py +262 -0
- coredis/modules/filters.py +1319 -0
- coredis/modules/graph.py +362 -0
- coredis/modules/json.py +691 -0
- coredis/modules/response/__init__.py +0 -0
- coredis/modules/response/_callbacks/__init__.py +0 -0
- coredis/modules/response/_callbacks/autocomplete.py +42 -0
- coredis/modules/response/_callbacks/graph.py +237 -0
- coredis/modules/response/_callbacks/json.py +21 -0
- coredis/modules/response/_callbacks/search.py +221 -0
- coredis/modules/response/_callbacks/timeseries.py +158 -0
- coredis/modules/response/types.py +179 -0
- coredis/modules/search.py +1089 -0
- coredis/modules/timeseries.py +1139 -0
- coredis/parser.cpython-313-darwin.so +0 -0
- coredis/parser.py +344 -0
- coredis/pipeline.py +1225 -0
- coredis/pool/__init__.py +11 -0
- coredis/pool/basic.py +453 -0
- coredis/pool/cluster.py +517 -0
- coredis/pool/nodemanager.py +340 -0
- coredis/py.typed +0 -0
- coredis/recipes/__init__.py +0 -0
- coredis/recipes/credentials/__init__.py +5 -0
- coredis/recipes/credentials/iam_provider.py +63 -0
- coredis/recipes/locks/__init__.py +5 -0
- coredis/recipes/locks/extend.lua +17 -0
- coredis/recipes/locks/lua_lock.py +281 -0
- coredis/recipes/locks/release.lua +10 -0
- coredis/response/__init__.py +5 -0
- coredis/response/_callbacks/__init__.py +538 -0
- coredis/response/_callbacks/acl.py +32 -0
- coredis/response/_callbacks/cluster.py +183 -0
- coredis/response/_callbacks/command.py +86 -0
- coredis/response/_callbacks/connection.py +31 -0
- coredis/response/_callbacks/geo.py +58 -0
- coredis/response/_callbacks/hash.py +85 -0
- coredis/response/_callbacks/keys.py +59 -0
- coredis/response/_callbacks/module.py +33 -0
- coredis/response/_callbacks/script.py +85 -0
- coredis/response/_callbacks/sentinel.py +179 -0
- coredis/response/_callbacks/server.py +241 -0
- coredis/response/_callbacks/sets.py +44 -0
- coredis/response/_callbacks/sorted_set.py +204 -0
- coredis/response/_callbacks/streams.py +185 -0
- coredis/response/_callbacks/strings.py +70 -0
- coredis/response/_callbacks/vector_sets.py +159 -0
- coredis/response/_utils.py +33 -0
- coredis/response/types.py +416 -0
- coredis/retry.py +233 -0
- coredis/sentinel.py +477 -0
- coredis/stream.py +369 -0
- coredis/tokens.py +2286 -0
- coredis/typing.py +593 -0
- coredis-5.5.0.dist-info/METADATA +211 -0
- coredis-5.5.0.dist-info/RECORD +100 -0
- coredis-5.5.0.dist-info/WHEEL +6 -0
- coredis-5.5.0.dist-info/licenses/LICENSE +23 -0
coredis/modules/response/_callbacks/autocomplete.py
@@ -0,0 +1,42 @@
+from __future__ import annotations
+
+from coredis.modules.response.types import AutocompleteSuggestion
+from coredis.response._callbacks import ResponseCallback
+from coredis.typing import AnyStr, ResponseType
+
+
+class AutocompleteCallback(
+    ResponseCallback[
+        list[ResponseType],
+        list[ResponseType],
+        tuple[AutocompleteSuggestion[AnyStr], ...] | tuple[()],
+    ]
+):
+    def transform(
+        self,
+        response: list[ResponseType],
+    ) -> tuple[AutocompleteSuggestion[AnyStr], ...] | tuple[()]:
+        if not response:
+            return ()
+        step = 1
+        results = []
+        score_idx = payload_idx = 0
+        if self.options.get("withscores"):
+            score_idx = 1
+            step += 1
+        if self.options.get("withpayloads"):
+            payload_idx = score_idx + 1
+            step += 1
+
+        for k in range(0, len(response), step):
+            section = response[k : k + step]
+            score = section[score_idx] if score_idx else None
+            results.append(
+                AutocompleteSuggestion(
+                    section[0],
+                    float(score) if score else None,
+                    section[payload_idx] if payload_idx else None,
+                )
+            )
+
+        return tuple(results)
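
The transform above walks the flat FT.SUGGET reply in strides of `step`, which grows with the `withscores`/`withpayloads` options. A minimal illustrative sketch of calling it directly, assuming the `ResponseCallback` base class (defined in coredis/response/_callbacks/__init__.py, not shown in this section) stores keyword arguments in `self.options`:

    from coredis.modules.response._callbacks.autocomplete import AutocompleteCallback

    # Flat FT.SUGGET reply with WITHSCORES: [suggestion, score, suggestion, score, ...]
    raw = [b"hel", b"0.7", b"hello", b"0.4"]
    suggestions = AutocompleteCallback(withscores=True).transform(raw)
    # Roughly: (AutocompleteSuggestion(b"hel", 0.7, None),
    #           AutocompleteSuggestion(b"hello", 0.4, None))
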
coredis/modules/response/_callbacks/graph.py
@@ -0,0 +1,237 @@
+from __future__ import annotations
+
+import asyncio
+import enum
+from typing import TYPE_CHECKING, Any
+
+from coredis._utils import b, nativestr
+from coredis.modules.response.types import (
+    GraphNode,
+    GraphPath,
+    GraphQueryResult,
+    GraphRelation,
+    GraphSlowLogInfo,
+)
+from coredis.response._callbacks import ResponseCallback
+from coredis.typing import (
+    AnyStr,
+    Generic,
+    Literal,
+    ResponsePrimitive,
+    ResponseType,
+    StringT,
+)
+
+if TYPE_CHECKING:
+    from coredis.client import Client
+
+
+class RedisValueTypes(enum.IntEnum):
+    VALUE_UNKNOWN = 0
+    VALUE_NULL = 1
+    VALUE_STRING = 2
+    VALUE_INTEGER = 3
+    VALUE_BOOLEAN = 4
+    VALUE_DOUBLE = 5
+    VALUE_ARRAY = 6
+    VALUE_EDGE = 7
+    VALUE_NODE = 8
+    VALUE_PATH = 9
+    VALUE_MAP = 10
+    VALUE_POINT = 11
+
+
+PROCEDURE_CALLS = {
+    "labels": "db.labels()",
+    "relationships": "db.relationshipTypes()",
+    "properties": "db.propertyKeys()",
+}
+
+SCALAR_MAPPING = {
+    RedisValueTypes.VALUE_INTEGER: int,
+    RedisValueTypes.VALUE_BOOLEAN: lambda v: b(v) == b"true",
+    RedisValueTypes.VALUE_DOUBLE: float,
+    RedisValueTypes.VALUE_STRING: lambda v: v,
+    RedisValueTypes.VALUE_NULL: lambda _: None,
+}
+
+
+class QueryCallback(
+    ResponseCallback[ResponseType, ResponseType, GraphQueryResult[AnyStr]],
+    Generic[AnyStr],
+):
+    properties: dict[int, StringT]
+    relationships: dict[int, StringT]
+    labels: dict[int, StringT]
+
+    def __init__(self, graph: StringT, **options: Any):
+        self.graph = graph
+        self.properties = {}
+        self.relationships = {}
+        self.labels = {}
+        super().__init__(**options)
+
+    async def pre_process(
+        self,
+        client: Client[Any],
+        response: ResponseType,
+    ) -> None:
+        if not len(response) == 3:
+            return
+        result_set = response[1]
+        max_label_id, max_relation_id, max_property_id = -1, -1, -1
+
+        cache = client.callback_storage[self.__class__]
+        self.labels = cache.setdefault(f"{self.graph}:labels", {})
+        self.relationships = cache.setdefault(f"{self.graph}:relationships", {})
+        self.properties = cache.setdefault(f"{self.graph}:properties", {})
+        for row in result_set:
+            for entity in row:
+                max_label_id, max_relation_id, max_property_id = self.fetch_max_ids(
+                    entity, max_label_id, max_relation_id, max_property_id
+                )
+        if any(k != -1 for k in [max_label_id, max_relation_id, max_property_id]):
+            self.labels, self.relationships, self.properties = await asyncio.gather(
+                self.fetch_mapping(max_label_id, "labels", client),
+                self.fetch_mapping(max_relation_id, "relationships", client),
+                self.fetch_mapping(max_property_id, "properties", client),
+            )
+
+    def fetch_max_ids(
+        self, entity: Any, max_label_id: int, max_relation_id: int, max_property_id: int
+    ) -> tuple[int, int, int]:
+        result_type = entity[0]
+        if result_type == RedisValueTypes.VALUE_NODE:
+            for label_id in entity[1][1]:
+                max_label_id = max(max_label_id, label_id)
+            for property_id in [k[0] for k in entity[1][2]]:
+                max_property_id = max(max_property_id, property_id)
+        elif result_type == RedisValueTypes.VALUE_EDGE:
+            max_relation_id = max(max_relation_id, entity[1][1])
+            for property_id in [k[0] for k in entity[1][4]]:
+                max_property_id = max(max_property_id, property_id)
+        elif result_type == RedisValueTypes.VALUE_PATH:
+            for segment in entity[1]:
+                max_label_id, max_relation_id, max_property_id = self.fetch_max_ids(
+                    segment, max_label_id, max_relation_id, max_property_id
+                )
+        elif result_type == RedisValueTypes.VALUE_ARRAY:
+            for segment in entity[1]:
+                max_label_id, max_relation_id, max_property_id = self.fetch_max_ids(
+                    segment, max_label_id, max_relation_id, max_property_id
+                )
+        return max_label_id, max_relation_id, max_property_id
+
+    async def fetch_mapping(
+        self,
+        max_id: int,
+        type: Literal["labels", "properties", "relationships"],
+        client: Client[Any],
+    ) -> dict[int, StringT]:
+        cache = client.callback_storage[self.__class__]
+        if max_id > max(cache[f"{self.graph}:{type}"] or [-1]):
+            cache[f"{self.graph}:{type}"] = dict(
+                enumerate(
+                    [
+                        k[0]
+                        for k in (
+                            await client.graph.ro_query(self.graph, f"CALL {PROCEDURE_CALLS[type]}")
+                        ).result_set
+                    ],
+                )
+            )
+        return cache[f"{self.graph}:{type}"]
+
+    def transform(
+        self,
+        response: ResponseType,
+    ) -> GraphQueryResult[AnyStr]:
+        result_set = []
+        headers = []
+        if len(response) == 3:
+            headers = [k[1] for k in response[0]]
+            stats = response[2]
+            for result in response[1]:
+                entities = []
+                for entity in result:
+                    entities.append(self.parse_entity(entity))
+                result_set.append(entities)
+        else:
+            stats = response[0]
+        stats_mapping = dict(
+            map(
+                lambda v: int(v) if v.isalnum() else v,
+                map(lambda v: v.strip(), nativestr(m).split(":")),
+            )
+            for m in stats
+        )
+        return GraphQueryResult(tuple(headers), tuple(result_set), stats_mapping)
+
+    def parse_entity(self, entity):
+        result_type = entity[0]
+        if result_type in [
+            RedisValueTypes.VALUE_NULL,
+            RedisValueTypes.VALUE_STRING,
+            RedisValueTypes.VALUE_INTEGER,
+            RedisValueTypes.VALUE_BOOLEAN,
+            RedisValueTypes.VALUE_DOUBLE,
+        ]:
+            return SCALAR_MAPPING[result_type](entity[1])
+        elif result_type == RedisValueTypes.VALUE_MAP:
+            it = iter(entity[1])
+            return dict(zip(it, map(self.parse_entity, it)))
+        elif result_type == RedisValueTypes.VALUE_ARRAY:
+            return [self.parse_entity(k) for k in entity[1]]
+        elif result_type == RedisValueTypes.VALUE_POINT:
+            return tuple(map(float, entity[1]))
+        elif result_type == RedisValueTypes.VALUE_EDGE:
+            return GraphRelation(
+                id=entity[1][0],
+                type=self.relationships[entity[1][1]],
+                src_node=entity[1][2],
+                destination_node=entity[1][3],
+                properties={
+                    self.properties[k[0]]: self.parse_entity((k[1], k[2])) for k in entity[1][4]
+                },
+            )
+        elif result_type == RedisValueTypes.VALUE_NODE:
+            return GraphNode(
+                id=entity[1][0],
+                labels={self.labels[k] for k in entity[1][1]},
+                properties={
+                    self.properties[k[0]]: self.parse_entity((k[1], k[2])) for k in entity[1][2]
+                },
+            )
+        elif result_type == RedisValueTypes.VALUE_PATH:
+            nodes, relations = entity[1]
+            nodes = self.parse_entity(nodes)
+            relations = self.parse_entity(relations)
+            return GraphPath(nodes, relations)
+
+
+class GraphSlowLogCallback(
+    ResponseCallback[ResponseType, ResponseType, tuple[GraphSlowLogInfo, ...]]
+):
+    def transform(
+        self,
+        response: ResponseType,
+    ) -> tuple[GraphSlowLogInfo, ...]:
+        return tuple(GraphSlowLogInfo(int(k[0]), k[1], k[2], float(k[3])) for k in response)
+
+
+class ConfigGetCallback(
+    ResponseCallback[
+        ResponseType,
+        ResponseType,
+        ResponsePrimitive | dict[AnyStr, ResponsePrimitive],
+    ]
+):
+    def transform(
+        self,
+        response: ResponseType,
+    ) -> ResponsePrimitive | dict[AnyStr, ResponsePrimitive]:
+        if isinstance(response, list):
+            if isinstance(response[0], list):
+                return dict(response)
+            else:
+                return response[1]
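
`QueryCallback.parse_entity` decodes the nested GRAPH.QUERY value encoding recursively, using the per-graph id-to-name caches that `pre_process` refreshes via the `db.labels()` / `db.relationshipTypes()` / `db.propertyKeys()` procedure calls. A rough sketch with the caches populated by hand (illustrative only; in normal use the client fills them before transform runs, and this assumes the callback can be instantiated without extra options):

    from coredis.modules.response._callbacks.graph import QueryCallback, RedisValueTypes

    cb = QueryCallback(graph="social")
    cb.labels = {0: b"Person"}    # normally fetched via CALL db.labels()
    cb.properties = {0: b"name"}  # normally fetched via CALL db.propertyKeys()

    # Node encoding: [VALUE_NODE, [node_id, [label_ids], [[prop_id, prop_type, prop_value], ...]]]
    raw_node = [
        RedisValueTypes.VALUE_NODE,
        [1, [0], [[0, RedisValueTypes.VALUE_STRING, b"alice"]]],
    ]
    cb.parse_entity(raw_node)
    # Roughly: GraphNode(id=1, labels={b"Person"}, properties={b"name": b"alice"})
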
coredis/modules/response/_callbacks/json.py
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+from typing import cast
+
+from coredis._json import json
+from coredis.response._callbacks import ResponseCallback
+from coredis.typing import JsonType, ResponseType
+
+
+class JsonCallback(ResponseCallback[ResponseType, ResponseType, JsonType]):
+    def transform(
+        self,
+        response: ResponseType,
+    ) -> JsonType:
+        if isinstance(response, (bytes, str)):
+            deser = json.loads(response)
+        elif isinstance(response, list):
+            deser = [json.loads(e) if isinstance(e, (bytes, str)) else e for e in response]
+        else:
+            deser = response
+        return cast(JsonType, deser)  # alas we lie.
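
`JsonCallback` simply deserializes string or bytes payloads with `json.loads`, does the same element-wise for list replies (e.g. JSON.MGET), and passes anything else through. A tiny illustrative sketch, assuming the base callback can be constructed without options:

    from coredis.modules.response._callbacks.json import JsonCallback

    cb = JsonCallback()
    cb.transform(b'{"a": 1}')        # -> {"a": 1}
    cb.transform([b"[1, 2]", None])  # -> [[1, 2], None]
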
coredis/modules/response/_callbacks/search.py
@@ -0,0 +1,221 @@
+from __future__ import annotations
+
+from collections import ChainMap, OrderedDict
+from functools import partial
+
+from coredis._json import json
+from coredis._utils import EncodingInsensitiveDict
+from coredis.modules.response.types import (
+    SearchAggregationResult,
+    SearchDocument,
+    SearchResult,
+)
+from coredis.response._callbacks import ResponseCallback
+from coredis.response._utils import flat_pairs_to_dict
+from coredis.typing import (
+    AnyStr,
+    ResponsePrimitive,
+    ResponseType,
+)
+
+
+class SearchConfigCallback(
+    ResponseCallback[
+        list[list[ResponsePrimitive]],
+        dict[AnyStr, ResponseType] | list[list[ResponsePrimitive]],
+        dict[AnyStr, ResponsePrimitive],
+    ]
+):
+    def transform(
+        self,
+        response: list[list[ResponsePrimitive]],
+    ) -> dict[AnyStr, ResponsePrimitive]:
+        command_arguments = []
+        for item in response:
+            try:
+                v = (item[0], json.loads(item[1]))
+            except (ValueError, TypeError):
+                v = item
+            command_arguments.append(v)
+        return dict(command_arguments)
+
+    def transform_3(
+        self,
+        response: dict[AnyStr, ResponseType] | list[list[ResponsePrimitive]],
+    ) -> dict[AnyStr, ResponsePrimitive]:
+        if isinstance(response, list):
+            return self.transform(response)
+        else:
+            config = {}
+            for item, value in response.items():
+                try:
+                    config[item] = json.loads(value)
+                except (ValueError, TypeError):
+                    config[item] = value
+            return config
+
+
+class SearchResultCallback(
+    ResponseCallback[
+        list[ResponseType],
+        list[ResponseType] | dict[AnyStr, ResponseType],
+        SearchResult[AnyStr],
+    ]
+):
+    def transform(
+        self,
+        response: list[ResponseType],
+    ) -> SearchResult[AnyStr]:
+        if self.options.get("nocontent"):
+            return SearchResult[AnyStr](
+                response[0],
+                tuple(SearchDocument(i, None, None, None, None, {}) for i in response[1:]),
+            )
+        step = 2
+        results = []
+        score_idx = payload_idx = sort_key_idx = 0
+        if self.options.get("withscores"):
+            score_idx = 1
+            step += 1
+        if self.options.get("withpayloads"):
+            payload_idx = score_idx + 1
+            step += 1
+        if self.options.get("withsortkeys"):
+            sort_key_idx = payload_idx + 1
+            step += 1
+
+        for k in range(1, len(response) - 1, step):
+            section = response[k : k + step]
+            score_explain = None
+            if self.options.get("explainscore"):
+                score = section[score_idx][0]
+                score_explain = section[score_idx][1]
+            else:
+                score = section[score_idx] if score_idx else None
+            fields = EncodingInsensitiveDict(flat_pairs_to_dict(section[-1]))
+            if "$" in fields:
+                fields = json.loads(fields.pop("$"))
+            results.append(
+                SearchDocument(
+                    section[0],
+                    float(score) if score else None,
+                    score_explain,
+                    section[payload_idx] if payload_idx else None,
+                    section[sort_key_idx] if sort_key_idx else None,
+                    fields,
+                )
+            )
+        return SearchResult[AnyStr](response[0], tuple(results))
+
+    def transform_3(
+        self,
+        response: list[ResponseType] | dict[AnyStr, ResponseType],
+    ) -> SearchResult[AnyStr]:
+        results = []
+        if isinstance(response, list):
+            return self.transform(response)
+        else:
+            response = EncodingInsensitiveDict(response)
+            for result in response["results"]:
+                result = EncodingInsensitiveDict(result)
+                score_explain = None
+                if self.options.get("explainscore"):
+                    score, score_explain = result.get("score")
+                else:
+                    score = result.get("score", None)
+                fields = EncodingInsensitiveDict(result.get("extra_attributes", {}))
+                if "$" in fields:
+                    fields = json.loads(fields.pop("$"))
+                results.append(
+                    SearchDocument(
+                        result["id"],
+                        float(score) if score else None,
+                        score_explain,
+                        result["payload"] if self.options.get("withpayloads") else None,
+                        result["sortkey"] if self.options.get("withsortkeys") else None,
+                        fields,
+                    )
+                )
+            return SearchResult[AnyStr](response["total_results"], tuple(results))
+
+
+class AggregationResultCallback(
+    ResponseCallback[
+        list[ResponseType],
+        dict[AnyStr, ResponseType] | list[ResponseType],
+        SearchAggregationResult[AnyStr],
+    ]
+):
+    def transform(
+        self,
+        response: list[ResponseType],
+    ) -> SearchAggregationResult:
+        return SearchAggregationResult[AnyStr](
+            [
+                flat_pairs_to_dict(k, partial(self.try_json, self.options))
+                for k in (response[1:] if not self.options.get("with_cursor") else response[0][1:])
+            ],
+            response[1] if self.options.get("with_cursor") else None,
+        )
+
+    def transform_3(
+        self,
+        response: dict[AnyStr, ResponseType] | list[ResponseType],
+    ) -> SearchAggregationResult:
+        if (
+            self.options.get("with_cursor")
+            and isinstance(response[0], dict)
+            or isinstance(response, dict)
+        ):
+            response, cursor = response if self.options.get("with_cursor") else (response, None)
+            response = EncodingInsensitiveDict(response)
+            return SearchAggregationResult[AnyStr](
+                [
+                    {
+                        r: self.try_json(self.options, v)
+                        for r, v in EncodingInsensitiveDict(k)["extra_attributes"].items()
+                    }
+                    for k in response["results"]
+                ],
+                cursor,
+            )
+        else:
+            return self.transform(response)
+
+    @staticmethod
+    def try_json(options, value):
+        if not options.get("dialect", None) == 3:
+            return value
+        try:
+            return json.loads(value)
+        except ValueError:
+            return value
+
+
+class SpellCheckCallback(
+    ResponseCallback[
+        list[ResponseType],
+        dict[AnyStr, ResponseType] | list[ResponseType],
+        dict[AnyStr, OrderedDict[AnyStr, float]],
+    ]
+):
+    def transform(
+        self,
+        response: list[ResponseType],
+    ) -> dict[AnyStr, OrderedDict[AnyStr, float]]:
+        return {
+            result[1]: OrderedDict(
+                (suggestion[1], float(suggestion[0])) for suggestion in result[2]
+            )
+            for result in response
+        }
+
+    def transform_3(
+        self,
+        response: dict[AnyStr, ResponseType] | list[ResponseType],
+    ) -> dict[AnyStr, OrderedDict[AnyStr, float]]:
+        # For older versions of redis search that didn't support RESP3
+        if isinstance(response, list):
+            return self.transform(response)
+        response = EncodingInsensitiveDict(response)
+        return {key: OrderedDict(ChainMap(*result)) for key, result in response["results"].items()}
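
For RESP2, `SearchResultCallback.transform` walks the flat FT.SEARCH reply of `[total, id, (score), (payload), (sortkey), fields, ...]` in strides of `step`, while `transform_3` consumes the RESP3 map form. An illustrative sketch of the RESP2 path with no optional sections, again assuming the base `ResponseCallback` keeps keyword options in `self.options`:

    from coredis.modules.response._callbacks.search import SearchResultCallback

    raw = [1, b"doc:1", [b"title", b"hello", b"views", b"42"]]
    result = SearchResultCallback().transform(raw)
    # Roughly a SearchResult wrapping a total of 1 and a single SearchDocument
    # for b"doc:1" whose fields are {b"title": b"hello", b"views": b"42"}
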
coredis/modules/response/_callbacks/timeseries.py
@@ -0,0 +1,158 @@
+from __future__ import annotations
+
+from typing import Any, cast
+
+from coredis._utils import EncodingInsensitiveDict
+from coredis.response._callbacks import (
+    ClusterMergeMapping,
+    DictCallback,
+    ResponseCallback,
+)
+from coredis.response._utils import flat_pairs_to_dict
+from coredis.typing import (
+    AnyStr,
+    RedisValueT,
+    ResponsePrimitive,
+    ResponseType,
+    Sequence,
+)
+
+
+class SampleCallback(
+    ResponseCallback[
+        list[RedisValueT],
+        list[RedisValueT],
+        tuple[int, float] | tuple[()],
+    ]
+):
+    def transform(
+        self,
+        response: list[RedisValueT],
+    ) -> tuple[int, float] | tuple[()]:
+        return (int(response[0]), float(response[1])) if response else ()
+
+
+class SamplesCallback(
+    ResponseCallback[
+        list[list[RedisValueT]] | None,
+        list[list[RedisValueT]] | None,
+        tuple[tuple[int, float], ...] | tuple[()],
+    ],
+):
+    def transform(
+        self,
+        response: list[list[RedisValueT]] | None,
+    ) -> tuple[tuple[int, float], ...] | tuple[()]:
+        if response:
+            return tuple(cast(tuple[int, float], SampleCallback().transform(r)) for r in response)
+        return ()
+
+
+class TimeSeriesInfoCallback(DictCallback[AnyStr, ResponseType]):
+    def transform(
+        self,
+        response: Sequence[ResponseType] | dict[ResponsePrimitive, ResponseType],
+    ) -> dict[AnyStr, ResponseType]:
+        dct = EncodingInsensitiveDict(super().transform(response))
+        if "labels" in dct:
+            dct["labels"] = dict(dct["labels"])
+        if "Chunks" in dct:
+            dct["Chunks"] = [flat_pairs_to_dict(chunk) for chunk in dct["Chunks"]]
+        if "rules" in dct and not isinstance(dct["rules"], dict):
+            dct["rules"] = {rule[0]: rule[1:] for rule in dct["rules"]}
+
+        return dict(dct)
+
+
+class TimeSeriesCallback(
+    ResponseCallback[
+        ResponseType,
+        ResponseType,
+        dict[AnyStr, tuple[dict[AnyStr, AnyStr], tuple[int, float] | tuple[()]]],
+    ]
+):
+    def transform(
+        self,
+        response: ResponseType,
+    ) -> dict[AnyStr, tuple[dict[AnyStr, AnyStr], tuple[int, float] | tuple[()]]]:
+        if isinstance(response, dict):
+            return {k: (v[0], tuple(v[1])) for k, v in response.items()}
+        else:
+            return {
+                r[0]: (dict(r[1]), (r[2][0], float(r[2][1])) if r[2] else tuple()) for r in response
+            }
+
+
+class TimeSeriesMultiCallback(
+    ResponseCallback[
+        ResponseType,
+        ResponseType,
+        dict[
+            AnyStr,
+            tuple[dict[AnyStr, AnyStr], tuple[tuple[int, float], ...] | tuple[()]],
+        ],
+    ]
+):
+    def transform(
+        self,
+        response: ResponseType,
+    ) -> dict[
+        AnyStr,
+        tuple[dict[AnyStr, AnyStr], tuple[tuple[int, float], ...] | tuple[()]],
+    ]:
+        if self.options.get("grouped"):
+            return {
+                r[0]: (
+                    flat_pairs_to_dict(r[1][0]) if r[1] else {},
+                    tuple(SampleCallback().transform(t) for t in r[2]),
+                )
+                for r in cast(Any, response)
+            }
+        else:
+            return {
+                r[0]: (
+                    dict(r[1]),
+                    tuple(SampleCallback().transform(t) for t in r[2]),
+                )
+                for r in cast(Any, response)
+            }
+
+    def transform_3(
+        self,
+        response: ResponseType,
+    ) -> dict[
+        AnyStr,
+        tuple[dict[AnyStr, AnyStr], tuple[tuple[int, float], ...] | tuple[()]],
+    ]:
+        if isinstance(response, dict):
+            if self.options.get("grouped"):
+                return {
+                    k: (
+                        r[0],
+                        tuple(SampleCallback().transform(t) for t in r[-1]),
+                    )
+                    for k, r in response.items()
+                }
+            else:
+                return {
+                    k: (
+                        r[0],
+                        tuple(SampleCallback().transform(t) for t in r[-1]),
+                    )
+                    for k, r in response.items()
+                }
+        else:
+            return self.transform(response)
+
+
+class ClusterMergeTimeSeries(ClusterMergeMapping[AnyStr, tuple[Any, ...]]):
+    def __init__(self) -> None:
+        super().__init__(value_combine=self.merge)
+
+    def merge(self, values: Any) -> tuple[dict[AnyStr, AnyStr], tuple[tuple[int, float], ...]]:
+        merged_labels: dict[AnyStr, AnyStr] = {}
+        merged_series: tuple[tuple[int, float], ...] = ()
+        for value in values:
+            merged_labels.update(value[0])
+            merged_series = merged_series + value[1]
+        return merged_labels, tuple(merged_series)
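
`SampleCallback` and `SamplesCallback` convert the `[timestamp, value]` pairs returned by TS.GET / TS.RANGE style commands into `(int, float)` tuples. A minimal sketch, assuming the callbacks can be constructed without options:

    from coredis.modules.response._callbacks.timeseries import SampleCallback, SamplesCallback

    SampleCallback().transform([1657272304448, b"5.0"])
    # -> (1657272304448, 5.0)
    SamplesCallback().transform([[1, b"1.5"], [2, b"2.5"]])
    # -> ((1, 1.5), (2, 2.5))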