coredis 4.24.0__py3-none-any.whl → 5.0.0rc1__py3-none-any.whl
This diff compares publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Potentially problematic release.
- coredis/__init__.py +1 -3
- coredis/_packer.py +10 -10
- coredis/_protocols.py +23 -32
- coredis/_py_311_typing.py +20 -0
- coredis/_py_312_typing.py +17 -0
- coredis/_utils.py +49 -51
- coredis/_version.py +3 -3
- coredis/cache.py +57 -82
- coredis/client/__init__.py +1 -2
- coredis/client/basic.py +129 -56
- coredis/client/cluster.py +147 -70
- coredis/commands/__init__.py +27 -7
- coredis/commands/_key_spec.py +11 -10
- coredis/commands/_utils.py +1 -1
- coredis/commands/_validators.py +30 -20
- coredis/commands/_wrappers.py +19 -99
- coredis/commands/bitfield.py +10 -2
- coredis/commands/constants.py +20 -3
- coredis/commands/core.py +1627 -1246
- coredis/commands/function.py +21 -19
- coredis/commands/monitor.py +0 -71
- coredis/commands/pubsub.py +7 -142
- coredis/commands/request.py +108 -0
- coredis/commands/script.py +9 -9
- coredis/commands/sentinel.py +60 -49
- coredis/connection.py +14 -15
- coredis/exceptions.py +2 -2
- coredis/experimental/__init__.py +0 -4
- coredis/globals.py +3 -0
- coredis/modules/autocomplete.py +28 -30
- coredis/modules/base.py +15 -31
- coredis/modules/filters.py +269 -245
- coredis/modules/graph.py +61 -62
- coredis/modules/json.py +172 -140
- coredis/modules/response/_callbacks/autocomplete.py +5 -4
- coredis/modules/response/_callbacks/graph.py +34 -29
- coredis/modules/response/_callbacks/json.py +5 -3
- coredis/modules/response/_callbacks/search.py +49 -53
- coredis/modules/response/_callbacks/timeseries.py +18 -30
- coredis/modules/response/types.py +1 -5
- coredis/modules/search.py +186 -169
- coredis/modules/timeseries.py +184 -164
- coredis/parser.py +6 -19
- coredis/pipeline.py +391 -422
- coredis/pool/basic.py +7 -7
- coredis/pool/cluster.py +3 -3
- coredis/pool/nodemanager.py +10 -3
- coredis/response/_callbacks/__init__.py +76 -57
- coredis/response/_callbacks/acl.py +0 -3
- coredis/response/_callbacks/cluster.py +25 -16
- coredis/response/_callbacks/command.py +8 -6
- coredis/response/_callbacks/connection.py +4 -3
- coredis/response/_callbacks/geo.py +17 -13
- coredis/response/_callbacks/hash.py +13 -11
- coredis/response/_callbacks/keys.py +9 -5
- coredis/response/_callbacks/module.py +2 -3
- coredis/response/_callbacks/script.py +6 -8
- coredis/response/_callbacks/sentinel.py +21 -17
- coredis/response/_callbacks/server.py +36 -14
- coredis/response/_callbacks/sets.py +3 -4
- coredis/response/_callbacks/sorted_set.py +27 -24
- coredis/response/_callbacks/streams.py +22 -13
- coredis/response/_callbacks/strings.py +7 -6
- coredis/response/_callbacks/vector_sets.py +126 -0
- coredis/response/types.py +13 -4
- coredis/sentinel.py +1 -1
- coredis/stream.py +4 -3
- coredis/tokens.py +343 -16
- coredis/typing.py +432 -79
- {coredis-4.24.0.dist-info → coredis-5.0.0rc1.dist-info}/METADATA +4 -5
- coredis-5.0.0rc1.dist-info/RECORD +95 -0
- coredis/client/keydb.py +0 -336
- coredis/pipeline.pyi +0 -2103
- coredis-4.24.0.dist-info/RECORD +0 -93
- {coredis-4.24.0.dist-info → coredis-5.0.0rc1.dist-info}/WHEEL +0 -0
- {coredis-4.24.0.dist-info → coredis-5.0.0rc1.dist-info}/licenses/LICENSE +0 -0
- {coredis-4.24.0.dist-info → coredis-5.0.0rc1.dist-info}/top_level.txt +0 -0

coredis/modules/response/_callbacks/autocomplete.py

@@ -2,7 +2,7 @@ from __future__ import annotations
 
 from coredis.modules.response.types import AutocompleteSuggestion
 from coredis.response._callbacks import ResponseCallback
-from coredis.typing import AnyStr, ResponseType
+from coredis.typing import AnyStr, ResponseType
 
 
 class AutocompleteCallback(
@@ -13,17 +13,18 @@ class AutocompleteCallback(
     ]
 ):
     def transform(
-        self,
+        self,
+        response: list[ResponseType],
     ) -> tuple[AutocompleteSuggestion[AnyStr], ...] | tuple[()]:
         if not response:
             return ()
         step = 1
         results = []
         score_idx = payload_idx = 0
-        if options.get("withscores"):
+        if self.options.get("withscores"):
             score_idx = 1
             step += 1
-        if options.get("withpayloads"):
+        if self.options.get("withpayloads"):
             payload_idx = score_idx + 1
             step += 1
 
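
The two autocomplete hunks above illustrate the signature change applied to response callbacks throughout this release: transform no longer receives per-call **options; options are bound when the callback is constructed and read back via self.options. The base ResponseCallback class is not shown in this diff, so the following is only a minimal standalone sketch of that pattern (the class name, option names, and stride logic here are illustrative assumptions, not coredis code):

from typing import Any


class OptionAwareCallback:
    """Stand-in for the pattern above: options are captured once in
    __init__ and consulted via self.options inside transform()."""

    def __init__(self, **options: Any) -> None:
        self.options = options

    def transform(self, response: list[Any]) -> tuple[Any, ...]:
        step = 1
        if self.options.get("withscores"):  # read from the instance,
            step += 1                       # not from per-call **options
        if self.options.get("withpayloads"):
            step += 1
        # keep every step-th element of the flat reply
        return tuple(response[::step])


# the option is supplied when the callback is built, not when it runs
callback = OptionAwareCallback(withscores=True)
print(callback.transform(["foo", "1.0", "bar", "0.5"]))  # ('foo', 'bar')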

coredis/modules/response/_callbacks/graph.py

@@ -20,14 +20,13 @@ from coredis.typing import (
     ResponsePrimitive,
     ResponseType,
     StringT,
-    ValueT,
 )
 
 if TYPE_CHECKING:
     from coredis.client import Client
 
 
-class
+class RedisValueTypes(enum.IntEnum):
     VALUE_UNKNOWN = 0
     VALUE_NULL = 1
     VALUE_STRING = 2
@@ -49,11 +48,11 @@ PROCEDURE_CALLS = {
 }
 
 SCALAR_MAPPING = {
-
-
-
-
-
+    RedisValueTypes.VALUE_INTEGER: int,
+    RedisValueTypes.VALUE_BOOLEAN: lambda v: b(v) == b"true",
+    RedisValueTypes.VALUE_DOUBLE: float,
+    RedisValueTypes.VALUE_STRING: lambda v: v,
+    RedisValueTypes.VALUE_NULL: lambda _: None,
 }
 
 
@@ -65,14 +64,17 @@ class QueryCallback(
     relationships: dict[int, StringT]
     labels: dict[int, StringT]
 
-    def __init__(self, graph: StringT):
+    def __init__(self, graph: StringT, **options: Any):
         self.graph = graph
         self.properties = {}
         self.relationships = {}
         self.labels = {}
+        super().__init__(**options)
 
     async def pre_process(
-        self,
+        self,
+        client: Client[Any],
+        response: ResponseType,
     ) -> None:
         if not len(response) == 3:
             return
@@ -99,21 +101,21 @@ class QueryCallback(
         self, entity: Any, max_label_id: int, max_relation_id: int, max_property_id: int
     ) -> tuple[int, int, int]:
         result_type = entity[0]
-        if result_type ==
+        if result_type == RedisValueTypes.VALUE_NODE:
             for label_id in entity[1][1]:
                 max_label_id = max(max_label_id, label_id)
             for property_id in [k[0] for k in entity[1][2]]:
                 max_property_id = max(max_property_id, property_id)
-        elif result_type ==
+        elif result_type == RedisValueTypes.VALUE_EDGE:
             max_relation_id = max(max_relation_id, entity[1][1])
             for property_id in [k[0] for k in entity[1][4]]:
                 max_property_id = max(max_property_id, property_id)
-        elif result_type ==
+        elif result_type == RedisValueTypes.VALUE_PATH:
             for segment in entity[1]:
                 max_label_id, max_relation_id, max_property_id = self.fetch_max_ids(
                     segment, max_label_id, max_relation_id, max_property_id
                 )
-        elif result_type ==
+        elif result_type == RedisValueTypes.VALUE_ARRAY:
             for segment in entity[1]:
                 max_label_id, max_relation_id, max_property_id = self.fetch_max_ids(
                     segment, max_label_id, max_relation_id, max_property_id
@@ -141,7 +143,8 @@ class QueryCallback(
         return cache[f"{self.graph}:{type}"]
 
     def transform(
-        self,
+        self,
+        response: ResponseType,
     ) -> GraphQueryResult[AnyStr]:
         result_set = []
         headers = []
@@ -167,21 +170,21 @@ class QueryCallback(
     def parse_entity(self, entity):
         result_type = entity[0]
         if result_type in [
-
-
-
-
-
+            RedisValueTypes.VALUE_NULL,
+            RedisValueTypes.VALUE_STRING,
+            RedisValueTypes.VALUE_INTEGER,
+            RedisValueTypes.VALUE_BOOLEAN,
+            RedisValueTypes.VALUE_DOUBLE,
         ]:
             return SCALAR_MAPPING[result_type](entity[1])
-        elif result_type ==
+        elif result_type == RedisValueTypes.VALUE_MAP:
             it = iter(entity[1])
             return dict(zip(it, map(self.parse_entity, it)))
-        elif result_type ==
+        elif result_type == RedisValueTypes.VALUE_ARRAY:
             return [self.parse_entity(k) for k in entity[1]]
-        elif result_type ==
+        elif result_type == RedisValueTypes.VALUE_POINT:
             return tuple(map(float, entity[1]))
-        elif result_type ==
+        elif result_type == RedisValueTypes.VALUE_EDGE:
             return GraphRelation(
                 id=entity[1][0],
                 type=self.relationships[entity[1][1]],
@@ -191,7 +194,7 @@ class QueryCallback(
                     self.properties[k[0]]: self.parse_entity((k[1], k[2])) for k in entity[1][4]
                 },
             )
-        elif result_type ==
+        elif result_type == RedisValueTypes.VALUE_NODE:
             return GraphNode(
                 id=entity[1][0],
                 labels={self.labels[k] for k in entity[1][1]},
@@ -199,7 +202,7 @@ class QueryCallback(
                     self.properties[k[0]]: self.parse_entity((k[1], k[2])) for k in entity[1][2]
                 },
             )
-        elif result_type ==
+        elif result_type == RedisValueTypes.VALUE_PATH:
             nodes, relations = entity[1]
             nodes = self.parse_entity(nodes)
             relations = self.parse_entity(relations)
@@ -207,11 +210,12 @@ class QueryCallback(
 
 
 class GraphSlowLogCallback(
-    ResponseCallback[ResponseType, ResponseType, tuple[GraphSlowLogInfo, ...]
+    ResponseCallback[ResponseType, ResponseType, tuple[GraphSlowLogInfo, ...]]
 ):
     def transform(
-        self,
-
+        self,
+        response: ResponseType,
+    ) -> tuple[GraphSlowLogInfo, ...]:
         return tuple(GraphSlowLogInfo(int(k[0]), k[1], k[2], float(k[3])) for k in response)
 
 
@@ -223,7 +227,8 @@ class ConfigGetCallback(
     ]
 ):
     def transform(
-        self,
+        self,
+        response: ResponseType,
    ) -> ResponsePrimitive | dict[AnyStr, ResponsePrimitive]:
         if isinstance(response, list):
             if isinstance(response[0], list):
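
The graph hunks above replace anonymous integer type tags with a RedisValueTypes enum and key SCALAR_MAPPING by its members. Below is a standalone sketch of how such a tag-to-converter table decodes a (type, value) pair, using only the enum members and converters visible above; the decode_scalar helper itself is a hypothetical illustration, not part of coredis:

import enum


class RedisValueTypes(enum.IntEnum):
    # members copied from the hunk above; the remaining members are omitted
    VALUE_UNKNOWN = 0
    VALUE_NULL = 1
    VALUE_STRING = 2


# converters in the spirit of the new SCALAR_MAPPING entries above
SCALAR_MAPPING = {
    RedisValueTypes.VALUE_NULL: lambda _: None,
    RedisValueTypes.VALUE_STRING: lambda v: v,
}


def decode_scalar(entity: tuple[int, bytes | None]) -> bytes | None:
    """Decode a (type-tag, raw-value) pair the way parse_entity handles scalars."""
    tag, raw = entity
    return SCALAR_MAPPING[RedisValueTypes(tag)](raw)


print(decode_scalar((2, b"hello")))  # b'hello'
print(decode_scalar((1, None)))      # None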

coredis/modules/response/_callbacks/json.py

@@ -3,13 +3,15 @@ from __future__ import annotations
 from typing import cast
 
 from coredis._json import json
-from coredis.modules.response.types import JsonType
 from coredis.response._callbacks import ResponseCallback
-from coredis.typing import
+from coredis.typing import JsonType, ResponseType
 
 
 class JsonCallback(ResponseCallback[ResponseType, ResponseType, JsonType]):
-    def transform(
+    def transform(
+        self,
+        response: ResponseType,
+    ) -> JsonType:
         if isinstance(response, (bytes, str)):
             deser = json.loads(response)
         elif isinstance(response, list):
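
JsonType now comes from coredis.typing rather than coredis.modules.response.types; the alias removed later in this diff spells out what it covers. A small sketch assuming the alias keeps that same shape after the move:

import json
from typing import Any

# alias body copied from the definition removed in the types.py hunk below
JsonType = str | int | float | bool | dict[str, Any] | list[Any] | None


def roundtrip(value: JsonType) -> JsonType:
    """Encode to JSON and decode back; JsonType mirrors what json.loads can produce."""
    return json.loads(json.dumps(value))


print(roundtrip({"answer": 42, "tags": ["a", "b"], "ok": True}))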

coredis/modules/response/_callbacks/search.py

@@ -16,9 +16,6 @@ from coredis.typing import (
     AnyStr,
     ResponsePrimitive,
     ResponseType,
-    StringT,
-    TypedDict,
-    ValueT,
 )
 
 
@@ -30,24 +27,24 @@ class SearchConfigCallback(
     ]
 ):
     def transform(
-        self,
+        self,
+        response: list[list[ResponsePrimitive]],
     ) -> dict[AnyStr, ResponsePrimitive]:
-
+        command_arguments = []
         for item in response:
             try:
                 v = (item[0], json.loads(item[1]))
             except (ValueError, TypeError):
                 v = item
-
-        return dict(
+            command_arguments.append(v)
+        return dict(command_arguments)
 
     def transform_3(
         self,
         response: dict[AnyStr, ResponseType] | list[list[ResponsePrimitive]],
-        **options: ValueT | None,
     ) -> dict[AnyStr, ResponsePrimitive]:
         if isinstance(response, list):
-            return self.transform(response
+            return self.transform(response)
         else:
             config = {}
             for item, value in response.items():
@@ -66,9 +63,10 @@ class SearchResultCallback(
     ]
 ):
     def transform(
-        self,
+        self,
+        response: list[ResponseType],
     ) -> SearchResult[AnyStr]:
-        if options.get("nocontent"):
+        if self.options.get("nocontent"):
            return SearchResult[AnyStr](
                response[0],
                tuple(SearchDocument(i, None, None, None, None, {}) for i in response[1:]),
@@ -76,20 +74,20 @@ class SearchResultCallback(
         step = 2
         results = []
         score_idx = payload_idx = sort_key_idx = 0
-        if options.get("withscores"):
+        if self.options.get("withscores"):
             score_idx = 1
             step += 1
-        if options.get("withpayloads"):
+        if self.options.get("withpayloads"):
             payload_idx = score_idx + 1
             step += 1
-        if options.get("withsortkeys"):
+        if self.options.get("withsortkeys"):
             sort_key_idx = payload_idx + 1
             step += 1
 
         for k in range(1, len(response) - 1, step):
             section = response[k : k + step]
             score_explain = None
-            if options.get("explainscore"):
+            if self.options.get("explainscore"):
                score = section[score_idx][0]
                score_explain = section[score_idx][1]
             else:
@@ -112,22 +110,20 @@ class SearchResultCallback(
     def transform_3(
         self,
         response: list[ResponseType] | dict[AnyStr, ResponseType],
-        **options: ValueT | None,
     ) -> SearchResult[AnyStr]:
         results = []
         if isinstance(response, list):
-            return self.transform(response
+            return self.transform(response)
         else:
             response = EncodingInsensitiveDict(response)
             for result in response["results"]:
                 result = EncodingInsensitiveDict(result)
                 score_explain = None
-                if options.get("explainscore"):
-                    score = result
-                    score_explain = result["score"][1]
+                if self.options.get("explainscore"):
+                    score, score_explain = result.get("score")
                 else:
-                    score = result
-                fields = EncodingInsensitiveDict(result
+                    score = result.get("score", None)
+                fields = EncodingInsensitiveDict(result.get("extra_attributes", {}))
                 if "$" in fields:
                     fields = json.loads(fields.pop("$"))
                 results.append(
@@ -135,8 +131,8 @@ class SearchResultCallback(
                         result["id"],
                         float(score) if score else None,
                         score_explain,
-                        result["payload"] if options.get("withpayloads") else None,
-                        result["sortkey"] if options.get("withsortkeys") else None,
+                        result["payload"] if self.options.get("withpayloads") else None,
+                        result["sortkey"] if self.options.get("withsortkeys") else None,
                         fields,
                     )
                 )
@@ -151,37 +147,40 @@ class AggregationResultCallback(
     ]
 ):
     def transform(
-        self,
+        self,
+        response: list[ResponseType],
     ) -> SearchAggregationResult:
         return SearchAggregationResult[AnyStr](
             [
-                flat_pairs_to_dict(k, partial(self.try_json, options))
-                for k in (response[1:] if not options.get("with_cursor") else response[0][1:])
+                flat_pairs_to_dict(k, partial(self.try_json, self.options))
+                for k in (response[1:] if not self.options.get("with_cursor") else response[0][1:])
             ],
-            response[1] if options.get("with_cursor") else None,
+            response[1] if self.options.get("with_cursor") else None,
         )
 
     def transform_3(
         self,
         response: dict[AnyStr, ResponseType] | list[ResponseType],
-        **options: ValueT | None,
     ) -> SearchAggregationResult:
         if (
-            options.get("with_cursor")
+            self.options.get("with_cursor")
            and isinstance(response[0], dict)
            or isinstance(response, dict)
         ):
-            response, cursor = response if options.get("with_cursor") else (response, None)
+            response, cursor = response if self.options.get("with_cursor") else (response, None)
             response = EncodingInsensitiveDict(response)
             return SearchAggregationResult[AnyStr](
                 [
-                    {
-
+                    {
+                        r: self.try_json(self.options, v)
+                        for r, v in EncodingInsensitiveDict(k)["extra_attributes"].items()
+                    }
+                    for k in response["results"]
                 ],
                 cursor,
             )
         else:
-            return self.transform(response
+            return self.transform(response)
 
     @staticmethod
     def try_json(options, value):
@@ -193,33 +192,30 @@ class AggregationResultCallback(
         return value
 
 
-class SpellCheckResult(TypedDict):
-    term: StringT
-    suggestions: OrderedDict[StringT, int]
-
-
 class SpellCheckCallback(
     ResponseCallback[
         list[ResponseType],
         dict[AnyStr, ResponseType] | list[ResponseType],
-
+        dict[AnyStr, OrderedDict[AnyStr, float]],
     ]
 ):
-    def transform(
-
-
-
-
-
+    def transform(
+        self,
+        response: list[ResponseType],
+    ) -> dict[AnyStr, OrderedDict[AnyStr, float]]:
+        return {
+            result[1]: OrderedDict(
+                (suggestion[1], float(suggestion[0])) for suggestion in result[2]
+            )
+            for result in response
+        }
 
     def transform_3(
         self,
         response: dict[AnyStr, ResponseType] | list[ResponseType],
-
-
+    ) -> dict[AnyStr, OrderedDict[AnyStr, float]]:
+        # For older versions of redis search that didn't support RESP3
         if isinstance(response, list):
-            return self.transform(response
-
-
-            key: OrderedDict(ChainMap(*result)) for key, result in response["results"].items()
-        }
+            return self.transform(response)
+        response = EncodingInsensitiveDict(response)
+        return {key: OrderedDict(ChainMap(*result)) for key, result in response["results"].items()}
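
The rewritten SpellCheckCallback above now returns dict[AnyStr, OrderedDict[AnyStr, float]] directly instead of the removed SpellCheckResult TypedDict. Re-running the new comprehension on a made-up RESP2-style payload shows the resulting shape (the sample data below is illustrative, not captured from a server):

from collections import OrderedDict

# per mis-spelled term: [marker, term, [[score, suggestion], ...]]
response = [
    [b"TERM", b"helo", [[b"0.66", b"hello"], [b"0.33", b"help"]]],
]

# the same comprehension used by the new transform above
suggestions = {
    result[1]: OrderedDict((suggestion[1], float(suggestion[0])) for suggestion in result[2])
    for result in response
}

print(suggestions)
# {b'helo': OrderedDict(...)} mapping b'hello' -> 0.66 and b'help' -> 0.33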

coredis/modules/response/_callbacks/timeseries.py

@@ -11,40 +11,37 @@ from coredis.response._callbacks import (
 from coredis.response._utils import flat_pairs_to_dict
 from coredis.typing import (
     AnyStr,
-
+    RedisValueT,
     ResponsePrimitive,
     ResponseType,
     Sequence,
-    ValueT,
 )
 
 
 class SampleCallback(
     ResponseCallback[
-        list[
-        list[
+        list[RedisValueT],
+        list[RedisValueT],
         tuple[int, float] | tuple[()],
     ]
 ):
     def transform(
         self,
-        response: list[
-        **options: ValueT | None,
+        response: list[RedisValueT],
     ) -> tuple[int, float] | tuple[()]:
         return (int(response[0]), float(response[1])) if response else ()
 
 
 class SamplesCallback(
     ResponseCallback[
-        list[list[
-        list[list[
+        list[list[RedisValueT]] | None,
+        list[list[RedisValueT]] | None,
         tuple[tuple[int, float], ...] | tuple[()],
     ],
 ):
     def transform(
         self,
-        response: list[list[
-        **options: ValueT | None,
+        response: list[list[RedisValueT]] | None,
     ) -> tuple[tuple[int, float], ...] | tuple[()]:
         if response:
             return tuple(cast(tuple[int, float], SampleCallback().transform(r)) for r in response)
@@ -55,9 +52,8 @@ class TimeSeriesInfoCallback(DictCallback[AnyStr, ResponseType]):
     def transform(
         self,
         response: Sequence[ResponseType] | dict[ResponsePrimitive, ResponseType],
-        **options: ValueT | None,
     ) -> dict[AnyStr, ResponseType]:
-        dct = EncodingInsensitiveDict(super().transform(response
+        dct = EncodingInsensitiveDict(super().transform(response))
         if "labels" in dct:
             dct["labels"] = dict(dct["labels"])
         if "Chunks" in dct:
@@ -76,7 +72,8 @@ class TimeSeriesCallback(
     ]
 ):
     def transform(
-        self,
+        self,
+        response: ResponseType,
     ) -> dict[AnyStr, tuple[dict[AnyStr, AnyStr], tuple[int, float] | tuple[()]]]:
         if isinstance(response, dict):
             return {k: (v[0], tuple(v[1])) for k, v in response.items()}
@@ -97,12 +94,13 @@ class TimeSeriesMultiCallback(
     ]
 ):
     def transform(
-        self,
+        self,
+        response: ResponseType,
     ) -> dict[
         AnyStr,
         tuple[dict[AnyStr, AnyStr], tuple[tuple[int, float], ...] | tuple[()]],
     ]:
-        if options.get("grouped"):
+        if self.options.get("grouped"):
             return {
                 r[0]: (
                     flat_pairs_to_dict(r[1][0]) if r[1] else {},
@@ -120,13 +118,14 @@ class TimeSeriesMultiCallback(
             }
 
     def transform_3(
-        self,
+        self,
+        response: ResponseType,
     ) -> dict[
         AnyStr,
         tuple[dict[AnyStr, AnyStr], tuple[tuple[int, float], ...] | tuple[()]],
     ]:
         if isinstance(response, dict):
-            if options.get("grouped"):
+            if self.options.get("grouped"):
                 return {
                     k: (
                         r[0],
@@ -143,23 +142,12 @@ class TimeSeriesMultiCallback(
                    for k, r in response.items()
                }
         else:
-            return self.transform(response
+            return self.transform(response)
 
 
 class ClusterMergeTimeSeries(ClusterMergeMapping[AnyStr, tuple[Any, ...]]):
     def __init__(self) -> None:
-
-
-    def combine(
-        self,
-        responses: Mapping[str, dict[AnyStr, tuple[Any, ...]]],
-        **kwargs: ValueT | None,
-    ) -> dict[AnyStr, tuple[Any, ...]]:
-        if not kwargs.get("grouped"):
-            return super().combine(responses, **kwargs)
-        raise NotImplementedError(
-            "Unable to merge response from multiple cluster nodes when used with grouping"
-        )
+        super().__init__(value_combine=self.merge)
 
     def merge(self, values: Any) -> tuple[dict[AnyStr, AnyStr], tuple[tuple[int, float], ...]]:
         merged_labels: dict[AnyStr, AnyStr] = {}
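
ClusterMergeTimeSeries above no longer overrides combine; it now passes value_combine=self.merge to the ClusterMergeMapping base class. That base class is not part of this diff, so the sketch below only illustrates the value-combining idea under assumed semantics (per-key values collected from several nodes are folded by a single callable); it is not coredis's implementation:

from typing import Any, Callable, Iterable, Mapping


def merge_node_responses(
    responses: Mapping[str, dict[str, Any]],
    value_combine: Callable[[Iterable[Any]], Any],
) -> dict[str, Any]:
    """Fold per-node mappings into one dict, combining values that share a key."""
    collected: dict[str, list[Any]] = {}
    for node_response in responses.values():
        for key, value in node_response.items():
            collected.setdefault(key, []).append(value)
    return {key: value_combine(values) for key, values in collected.items()}


# usage: sum samples for the same series reported by two hypothetical nodes
print(
    merge_node_responses(
        {"node-1": {"ts:a": 1, "ts:b": 2}, "node-2": {"ts:a": 3}},
        value_combine=sum,
    )
)
# {'ts:a': 4, 'ts:b': 2}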

coredis/modules/response/types.py

@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 import dataclasses
-from typing import
+from typing import NamedTuple
 
 from coredis._json import json
 from coredis.typing import (
@@ -81,10 +81,6 @@ class AutocompleteSuggestion(Generic[AnyStr]):
     payload: AnyStr | None
 
 
-#: Type alias for valid python types that can be represented as json
-JsonType = str | int | float | bool | dict[str, Any] | list[Any] | None
-
-
 @dataclasses.dataclass
 class GraphNode(Generic[AnyStr]):
     """