redis-benchmarks-specification 0.1.320-py3-none-any.whl → 0.1.321-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of redis-benchmarks-specification might be problematic.

redis_benchmarks_specification/__builder__/builder.py

@@ -73,18 +73,6 @@ def main():
  parser.add_argument(
  "--arch", type=str, default="amd64", help="arch to build artifacts"
  )
- parser.add_argument(
- "--builder-group",
- type=str,
- default=STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
- help="Consumer group name to read from the stream",
- )
- parser.add_argument(
- "--builder-id",
- type=str,
- default="1",
- help="Consumer id to read from the stream",
- )
  parser.add_argument(
  "--setups-folder",
  type=str,
@@ -161,14 +149,7 @@ def main():

  build_spec_image_prefetch(builders_folder, different_build_specs)

- builder_group = args.builder_group
- builder_id = args.builder_id
- if builder_group is None:
- builder_group = STREAM_GH_EVENTS_COMMIT_BUILDERS_CG
- if builder_id is None:
- builder_id = "1"
-
- builder_consumer_group_create(conn, builder_group)
+ builder_consumer_group_create(conn)
  if args.github_token is not None:
  logging.info("detected a github token. will update as much as possible!!! =)")
  previous_id = args.consumer_start_id
@@ -181,26 +162,28 @@ def main():
  args.docker_air_gap,
  arch,
  args.github_token,
- builder_group,
- builder_id,
  )


- def builder_consumer_group_create(
- conn, builder_group=STREAM_GH_EVENTS_COMMIT_BUILDERS_CG, id="$"
- ):
+ def builder_consumer_group_create(conn, id="$"):
  try:
  conn.xgroup_create(
  STREAM_KEYNAME_GH_EVENTS_COMMIT,
- builder_group,
+ STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
  mkstream=True,
  id=id,
  )
  logging.info(
- "Created consumer group named {} to distribute work.".format(builder_group)
+ "Created consumer group named {} to distribute work.".format(
+ STREAM_GH_EVENTS_COMMIT_BUILDERS_CG
+ )
  )
  except redis.exceptions.ResponseError:
- logging.info("Consumer group named {} already existed.".format(builder_group))
+ logging.info(
+ "Consumer group named {} already existed.".format(
+ STREAM_GH_EVENTS_COMMIT_BUILDERS_CG
+ )
+ )


  def check_benchmark_build_comment(comments):
@@ -222,22 +205,14 @@ def builder_process_stream(
  docker_air_gap=False,
  arch="amd64",
  github_token=None,
- builder_group=None,
- builder_id=None,
  ):
  new_builds_count = 0
  auto_approve_github_comments = True
  build_stream_fields_arr = []
- if builder_group is None:
- builder_group = STREAM_GH_EVENTS_COMMIT_BUILDERS_CG
- if builder_id is None:
- builder_id = "1"
- consumer_name = "{}-proc#{}".format(builder_group, builder_id)
- logging.info(
- f"Entering blocking read waiting for work. building for arch: {arch}. Using consumer id {consumer_name}"
- )
+ logging.info("Entering blocking read waiting for work.")
+ consumer_name = "{}-proc#{}".format(STREAM_GH_EVENTS_COMMIT_BUILDERS_CG, "1")
  newTestInfo = conn.xreadgroup(
- builder_group,
+ STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
  consumer_name,
  {STREAM_KEYNAME_GH_EVENTS_COMMIT: previous_id},
  count=1,
@@ -255,21 +230,6 @@
  docker_client = docker.from_env()
  from pathlib import Path

- build_request_arch = None
- if b"arch" in testDetails:
- build_request_arch = testDetails[b"arch"].decode()
- elif b"build_arch" in testDetails:
- build_request_arch = testDetails[b"build_arch"].decode()
- else:
- logging.info("No arch info found on the stream.")
- if build_request_arch is not None and build_request_arch != arch:
- logging.info(
- "skipping build request given requested build arch {}!={}".format(
- build_request_arch, arch
- )
- )
- return previous_id, new_builds_count, build_stream_fields_arr
-
  home = str(Path.home())
  if b"git_hash" in testDetails:
  git_hash = testDetails[b"git_hash"]
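Taken together, the builder.py hunks above drop the --builder-group and --builder-id options and the per-request arch filter: the builder now always registers and reads as the fixed consumer group, under a single hard-coded consumer name of the form "<group>-proc#1". A minimal redis-py sketch of that simplified flow follows; the stream key, group name, and connection settings shown here are illustrative assumptions, not the package's actual constants.

# Minimal sketch of the simplified consumer-group flow, assuming a local Redis.
# The stream key and group name are stand-ins for the package's
# STREAM_KEYNAME_GH_EVENTS_COMMIT / STREAM_GH_EVENTS_COMMIT_BUILDERS_CG constants.
import logging

import redis

STREAM_KEYNAME_GH_EVENTS_COMMIT = "gh:events:commit"  # assumed value
STREAM_GH_EVENTS_COMMIT_BUILDERS_CG = "builders-cg"  # assumed value

conn = redis.Redis(host="localhost", port=6379)

# Create the single, fixed-name consumer group (idempotent: "already exists" is fine).
try:
    conn.xgroup_create(
        STREAM_KEYNAME_GH_EVENTS_COMMIT,
        STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
        id="$",
        mkstream=True,
    )
except redis.exceptions.ResponseError:
    logging.info("Consumer group already existed.")

# Blocking read of one work item as the fixed consumer "<group>-proc#1".
consumer_name = "{}-proc#{}".format(STREAM_GH_EVENTS_COMMIT_BUILDERS_CG, "1")
new_test_info = conn.xreadgroup(
    STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
    consumer_name,
    {STREAM_KEYNAME_GH_EVENTS_COMMIT: ">"},  # ">" = entries never delivered to this group
    count=1,
    block=5000,
)
print(new_test_info)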
redis_benchmarks_specification/__cli__/args.py

@@ -138,7 +138,7 @@ def spec_cli_args(parser):
  parser.add_argument("--gh_repo", type=str, default="redis")
  parser.add_argument("--server_name", type=str, default=None)
  parser.add_argument("--run_image", type=str, default="redis")
- parser.add_argument("--build_arch", type=str, default=None)
+ parser.add_argument("--build_arch", type=str, default="amd64")
  parser.add_argument("--id", type=str, default="dockerhub")
  parser.add_argument("--mnt_point", type=str, default="")
  parser.add_argument("--trigger-unstable-commits", type=bool, default=True)
redis_benchmarks_specification/__cli__/cli.py

@@ -432,9 +432,6 @@ def trigger_tests_cli_command_logic(args, project_name, project_version):
  commit_dict["tests_groups_regexp"] = tests_groups_regexp
  commit_dict["github_org"] = args.gh_org
  commit_dict["github_repo"] = args.gh_repo
- if args.build_arch is not None:
- commit_dict["build_arch"] = args.build_arch
- commit_dict["arch"] = args.build_arch
  if args.server_name is not None and args.server_name != "":
  commit_dict["server_name"] = args.server_name
  if args.build_artifacts != "":
redis_benchmarks_specification/__self_contained_coordinator__/docker.py

@@ -15,7 +15,7 @@ def generate_standalone_redis_server_args(
  redis_arguments="",
  password=None,
  ):
- added_params = ["port", "protected-mode", "dir", "requirepass", "logfile"]
+ added_params = ["port", "protected-mode", "dir", "requirepass"]
  # start redis-server
  command = [
  binary,
@@ -31,7 +31,6 @@ def generate_standalone_redis_server_args(
  logging.info("Redis server will be started with password authentication")
  if dbdir != "":
  command.extend(["--dir", dbdir])
- command.extend(["--logfile", f"{dbdir}redis.log"])
  if configuration_parameters is not None:
  for parameter, parameter_value in configuration_parameters.items():
  if parameter not in added_params:
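The docker.py change removes the implicit "--logfile <dbdir>redis.log" pair, so redis-server output is typically left to the container's stdout/stderr, and "logfile" can again be supplied through configuration parameters since it is no longer reserved in added_params. A rough sketch of the resulting argument assembly is below; the function name, parameter order, and defaults outside the hunks are assumptions, not the package's exact implementation.

# Simplified sketch of standalone redis-server argument assembly after this change
# (no implicit --logfile). Names outside the diff hunks are illustrative.
def build_standalone_redis_server_args(
    binary, port, dbdir, configuration_parameters=None, password=None
):
    # Parameters the coordinator already sets; user config must not duplicate them.
    added_params = ["port", "protected-mode", "dir", "requirepass"]
    command = [binary, "--protected-mode", "no", "--port", str(port)]
    if password is not None:
        command.extend(["--requirepass", password])
    if dbdir != "":
        command.extend(["--dir", dbdir])
        # Previously a "--logfile <dbdir>redis.log" pair was appended here.
    if configuration_parameters is not None:
        for parameter, parameter_value in configuration_parameters.items():
            if parameter not in added_params:
                command.extend(["--{}".format(parameter), parameter_value])
    return command


print(build_standalone_redis_server_args("redis-server", 6379, "/tmp/", {"save": '""'}))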
redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py

@@ -107,43 +107,6 @@ from redis_benchmarks_specification.__self_contained_coordinator__.artifacts imp
  from redis_benchmarks_specification.__self_contained_coordinator__.build_info import (
  extract_build_info_from_streamdata,
  )
-
-
- def print_directory_logs(directory_path, description=""):
- """Print all .log files in a directory for debugging purposes."""
- if not os.path.exists(directory_path):
- logging.warning(f"Directory {directory_path} does not exist")
- return
-
- logging.info(
- f"Printing all .log files in {description} directory: {directory_path}"
- )
- try:
- for root, dirs, files in os.walk(directory_path):
- for file in files:
- # Only process .log files
- if not file.endswith(".log"):
- continue
-
- file_path = os.path.join(root, file)
- logging.info(f"Found log file: {file_path}")
- try:
- # Try to read and print the log file content
- with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
- content = f.read()
- if content.strip(): # Only print non-empty files
- logging.info(f"Content of {file_path}:")
- logging.info("-" * 40)
- logging.info(content)
- logging.info("-" * 40)
- else:
- logging.info(f"Log file {file_path} is empty")
- except Exception as e:
- logging.warning(f"Could not read log file {file_path}: {e}")
- except Exception as e:
- logging.error(f"Error walking directory {directory_path}: {e}")
-
-
  from redis_benchmarks_specification.__self_contained_coordinator__.cpuset import (
  extract_db_cpu_limit,
  generate_cpuset_cpus,
@@ -1356,18 +1319,6 @@ def process_self_contained_coordinator_stream(

  print("-" * 60)

- # Print all log files in the temporary directories for debugging
- logging.critical(
- "Printing all files in temporary directories for debugging..."
- )
- try:
- print_directory_logs(temporary_dir, "Redis server")
- print_directory_logs(temporary_dir_client, "Client")
- except Exception as log_error:
- logging.error(
- f"Failed to print directory logs: {log_error}"
- )
-
  test_result = False
  # tear-down
  logging.info("Tearing down setup")
@@ -1398,27 +1349,14 @@
  )
  )
  pass
-
- # Only remove temporary directories if test passed
- if test_result:
- logging.info(
- "Test passed. Removing temporary dirs {} and {}".format(
- temporary_dir, temporary_dir_client
- )
- )
- shutil.rmtree(temporary_dir, ignore_errors=True)
- shutil.rmtree(
- temporary_dir_client, ignore_errors=True
- )
- else:
- logging.warning(
- "Test failed. Preserving temporary dirs for debugging: {} and {}".format(
- temporary_dir, temporary_dir_client
+ logging.info(
+ "Removing temporary dirs {} and {}".format(
+ temporary_dir, temporary_dir_client
  )
- # Print all log files in the temporary directories for debugging
- print_directory_logs(temporary_dir, "Redis server")
- print_directory_logs(temporary_dir_client, "Client")
+ )
+
+ shutil.rmtree(temporary_dir, ignore_errors=True)
+ shutil.rmtree(temporary_dir_client, ignore_errors=True)

  overall_result &= test_result

redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-caching-string-100k-sessions.yml (new file)

@@ -0,0 +1,98 @@
+ version: 0.4
+ name: memtier_benchmark-session-caching-string-100k-sessions
+ description: |
+   Runs memtier_benchmark to simulate a session caching workload for a SaaS application.
+   This benchmark focuses exclusively on **string-based session storage**, where each session
+   is stored in Redis as a serialized JSON string (`session:<id>`) containing fields like
+   user ID, timestamps, device info, and metadata (total ~400–600B).
+
+   The benchmark models a typical read-heavy cache usage pattern, with an approximate
+   **read:write ratio of 80:20**, reflecting session retrievals and infrequent updates.
+
+   Command groups:
+   - Session cache reads (`GET`): ~80%
+   - Session cache writes (`SET`): ~20%
+
+   To better approximate real-world access patterns, the benchmark uses a **Zipfian key distribution**
+   (`--command-key-pattern=Z`). This simulates **skewed access** where a small subset of sessions (hot keys)
+   receives a majority of reads — a common pattern in production workloads.
+
+   While Zipfian is technically a power-law distribution, it effectively mimics **Poisson-like behavior**
+   in large-scale systems, where access frequency is uneven but statistically predictable.
+   This access skew mirrors real-life scenarios such as:
+   - Frequently accessed or "sticky" user sessions
+   - Popular user accounts or active devices
+   - Hot caches for trending or recently used resources
+
+   Using Zipfian distribution allows this benchmark to capture **contention**, **cache pressure**, and
+   **read amplification** effects that occur in real SaaS applications under load.
+
+ dbconfig:
+   configuration-parameters:
+     save: '""'
+   resources:
+     requests:
+       memory: 1g
+   init_lua: |
+     local seed = 12345
+     math.randomseed(seed)
+     local now = tonumber(redis.call('TIME')[1])
+     local function rand_str(len)
+       local chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
+       local res = ''
+       for i = 1, len do
+         local idx = math.random(#chars)
+         res = res .. chars:sub(idx, idx)
+       end
+       return res
+     end
+     for i = 1, 100000 do
+       local session_id = 'session:' .. i
+       local session_data = string.format(
+         '{"userId":"user-%d","organizationId":"org-%d","role":"member","createdAt":"%d","lastAccessed":"%d","ipAddress":"192.168.1.%d","device":"device-%s","authMethod":"password","status":"active","metadata":"%s"}',
+         i, i, now - math.random(3600), now, (i % 255), rand_str(8), rand_str(200 + (i % 100))
+       )
+       redis.call('SET', session_id, session_data)
+     end
+     return 'OK'
+
+ tested-groups:
+   - string
+
+ tested-commands:
+   - get
+   - set
+
+ redis-topologies:
+   - oss-standalone
+
+ build-variants:
+   - gcc:15.2.0-amd64-debian-bookworm-default
+   - gcc:15.2.0-arm64-debian-bookworm-default
+   - dockerhub
+
+ clientconfig:
+   run_image: redislabs/memtier_benchmark:edge
+   tool: memtier_benchmark
+   arguments: >
+     --key-prefix ""
+     --key-minimum 1
+     --key-maximum 100000
+     --data-size-range=400-600
+     --pipeline=1
+     --print-percentiles=50,90,95,99
+     --run-count=1
+     --test-time=120
+     --command="GET session:__key__"
+     --command-key-pattern=Z
+     --command-ratio=90
+     --command='SET session:__key__ "{\"userId\":\"user-__key__\",\"organizationId\":\"org-__key__\",\"role\":\"member\",\"createdAt\":\"1754905396\",\"lastAccessed\":\"1754906472\",\"ipAddress\":\"192.168.1.36\",\"device\":\"device-2T8YGLbl\",\"authMethod\":\"password\",\"status\":\"active\",\"metadata\":\"wDVmiQsSe2oSEPfhhvYN6jbVxVykSCzQXmnsqCIv5MEmpslD1LMgwJcUe8Wmhvhh56dgTDhH4o3M9vYa6JyAS3Axs2zufVVPWtTsCRVbNGjMmumy7j5vIM0OuclgbHBOxtGDtCU88YBc9IP5oNYiycXXCmq5s7mWAdmRhFrmFxOy3VKlrXJz4ZrtF6KHEflGhoF1FzXtLO8Dckg3B34kdxGsnNiRjNIEtWVxeXekUSDyrxcnwiIy29Zx8SxX2g8ZUrz3sTpEwX7KQZFpIi7XoSKZywvC6I63VMNpZcAO\"}"'
+     --command-key-pattern=Z
+     --command-ratio=10
+     --hide-histogram
+   resources:
+     requests:
+       cpus: '4'
+       memory: 2g
+
+ priority: 150
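The new test suite's description explains why a Zipfian key pattern is used: a small set of hot sessions should absorb most reads. As a standalone illustration (not part of the spec, and the skew exponent memtier_benchmark applies internally is not stated in the file), the sketch below samples the same 100,000 session keys from a Zipf-like distribution and reports how concentrated access is on the hottest keys.

# Illustrative only: approximate the hot-key skew implied by a Zipfian key pattern
# over 100,000 session keys. The skew exponent is an assumption.
import random
from collections import Counter

NUM_KEYS = 100_000
ALPHA = 1.0  # assumed skew exponent

weights = [1.0 / (rank**ALPHA) for rank in range(1, NUM_KEYS + 1)]
keys = ["session:{}".format(i) for i in range(1, NUM_KEYS + 1)]

sample = random.choices(keys, weights=weights, k=100_000)
hits = Counter(sample)
top10_share = sum(count for _, count in hits.most_common(10)) / len(sample)
print("top-10 hottest session keys receive {:.1%} of accesses".format(top10_share))

With an exponent of 1.0, roughly a quarter of all sampled requests land on the ten hottest keys, which is the kind of contention and cache pressure the description calls out.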
redis_benchmarks_specification-0.1.321.dist-info/METADATA

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: redis-benchmarks-specification
- Version: 0.1.320
+ Version: 0.1.321
  Summary: The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute.
  Author: filipecosta90
  Author-email: filipecosta.90@gmail.com
redis_benchmarks_specification-0.1.321.dist-info/RECORD

@@ -4,11 +4,11 @@ redis_benchmarks_specification/__api__/api.py,sha256=k_CMICtMm1z8jY3hByaL0hIr_5v
  redis_benchmarks_specification/__api__/app.py,sha256=JzQm84DjIVdfLbDO423BJbrds6gFzMbA0syRkHE_aUU,7063
  redis_benchmarks_specification/__builder__/Readme.md,sha256=O6MV_J3OSgzW-ir2TbukP8Vhkm_LOzQJJndG1Cykqic,111
  redis_benchmarks_specification/__builder__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
- redis_benchmarks_specification/__builder__/builder.py,sha256=86DQuqf9LhPl1_bpmQK2rkACBxYBz13Wu8fsAnKkm7g,29730
+ redis_benchmarks_specification/__builder__/builder.py,sha256=cK2yGtoS6Xnux7wANQkTT2rCvkz5Y5_7huQbQFAcMMk,28324
  redis_benchmarks_specification/__builder__/schema.py,sha256=1wcmyVJBcWrBvK58pghN9NCoWLCO3BzPsmdKWYfkVog,584
  redis_benchmarks_specification/__cli__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
- redis_benchmarks_specification/__cli__/args.py,sha256=C0EdJbq5F6Td6kvEkzN5ZWMhWYuizV_tGzVhkPLKEi0,7207
- redis_benchmarks_specification/__cli__/cli.py,sha256=iTjINQ-RV_q2ovq1neSoRCAggpGdeP5mX3_1aFxSScY,22001
+ redis_benchmarks_specification/__cli__/args.py,sha256=uZkk1Jom9i0xJ_OpVMrIWbw_70jFo7IswLV2EtKTKEA,7210
+ redis_benchmarks_specification/__cli__/cli.py,sha256=6tt0Ai-JIFEF3ykWFU2_g5ZrzKVIoyLLXUmyzYpVDF4,21843
  redis_benchmarks_specification/__cli__/stats.py,sha256=8R6fsiR00Uqa-01_Yq0PegriZkiM313KjCpDv5PhEdM,28965
  redis_benchmarks_specification/__common__/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  redis_benchmarks_specification/__common__/builder_schema.py,sha256=kfDpRIk7NkJrb5qj9jzsBhLVNO7K_W2Clumj4pxrkG8,5938
@@ -32,11 +32,11 @@ redis_benchmarks_specification/__self_contained_coordinator__/artifacts.py,sha25
  redis_benchmarks_specification/__self_contained_coordinator__/build_info.py,sha256=vlg8H8Rxu2falW8xp1GvL1SV1fyBguSbz6Apxc7A2yM,2282
  redis_benchmarks_specification/__self_contained_coordinator__/clients.py,sha256=EL1V4-i-tTav1mcF_CUosqPF3Q1qi9BZL0zFajEk70c,1878
  redis_benchmarks_specification/__self_contained_coordinator__/cpuset.py,sha256=sRvtoJIitppcOpm3R5LbVmSfPEAqPumOqVATnF5Wbek,594
- redis_benchmarks_specification/__self_contained_coordinator__/docker.py,sha256=09SyAfqlzs1KG9ZAajClNWtiNk4Jqzd--4-m3n1rLjU,3156
+ redis_benchmarks_specification/__self_contained_coordinator__/docker.py,sha256=eXJM2FybaVNTjvTrKwHextcNmkCIK9HQaG8ZNWjgx18,3086
  redis_benchmarks_specification/__self_contained_coordinator__/post_processing.py,sha256=sVLKNnWdAqYY9DjVdqRC5tDaIrVSaI3Ca7w8-DQ-LRM,776
  redis_benchmarks_specification/__self_contained_coordinator__/prepopulation.py,sha256=1UeFr2T1ZQBcHCSd4W1ZtaWgXyFPfjLyDi_DgDc1eTA,2957
  redis_benchmarks_specification/__self_contained_coordinator__/runners.py,sha256=noRHn9leTfEm2fa1yHBHQd8TUGhFDoU86QQkHABnWSs,30073
- redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=QcQwPWvhS5C96NNkZFzi0xwMzwLzl6kxYmS1sWTU7s0,82532
+ redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=hlSBMa-n6byXW7zVxq5nzqEKN34DrPpcgN-NnzGk-_c,79375
  redis_benchmarks_specification/__setups__/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  redis_benchmarks_specification/__setups__/topologies.py,sha256=xQ1IJkcTji_ZjLiJd3vOxZpvbNtBLZw9cPkw5hGJKHU,481
  redis_benchmarks_specification/__spec__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
@@ -272,12 +272,13 @@ redis_benchmarks_specification/test-suites/memtier_benchmark-nokeys-pubsub-mixed
  redis_benchmarks_specification/test-suites/memtier_benchmark-nokeys-pubsub-publish-1K-channels-10B-no-subscribers.yml,sha256=8M9AdpNaVBuSUm78cqKModOF_xITV_RM7RLCTyvtvaQ,825
  redis_benchmarks_specification/test-suites/memtier_benchmark-nokeys-server-time-pipeline-10.yml,sha256=zeTHtpbhNGfzTXHfr9P5e62PebxUONRvTsCbXJZhsTs,721
  redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-caching-hash-100k-sessions.yml,sha256=H98DrXlq-lRhoe1M7vehfDBbUdZ7WwWroriTpklx-PI,3646
+ redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-caching-string-100k-sessions.yml,sha256=yVrHQxMp2xzSpZ5Vx7GCTqhwbjMB-RoBf1T20qz9puE,3909
  redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-storage-100k-sessions.yml,sha256=GCNREyvwGlhMDmrvOaRWi1w0GqG9YA-8TRoh1xIa4xw,7012
  redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-storage-1k-sessions.yml,sha256=2egtIxPxCze2jlbAfgsk4v9JSQHNMoPLbDWFEW8olDg,7006
  redis_benchmarks_specification/test-suites/template.txt,sha256=ezqGiRPOvuSDO0iG7GEf-AGXNfHbgXI89_G0RUEzL88,481
  redis_benchmarks_specification/vector-search-test-suites/vector_db_benchmark_test.yml,sha256=PD7ow-k4Ll2BkhEC3aIqiaCZt8Hc4aJIp96Lw3J3mcI,791
- redis_benchmarks_specification-0.1.320.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- redis_benchmarks_specification-0.1.320.dist-info/METADATA,sha256=dsa2e5C8eW97s-VDqOhHSoAX-I6U4qcqVBZ62Wmx-8M,22726
- redis_benchmarks_specification-0.1.320.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- redis_benchmarks_specification-0.1.320.dist-info/entry_points.txt,sha256=x5WBXCZsnDRTZxV7SBGmC65L2k-ygdDOxV8vuKN00Nk,715
- redis_benchmarks_specification-0.1.320.dist-info/RECORD,,
+ redis_benchmarks_specification-0.1.321.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ redis_benchmarks_specification-0.1.321.dist-info/METADATA,sha256=Ba9UTZowrE7S3u_ir4Sg9H2FHp2eWRE-nVdCCiyi9r0,22726
+ redis_benchmarks_specification-0.1.321.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ redis_benchmarks_specification-0.1.321.dist-info/entry_points.txt,sha256=x5WBXCZsnDRTZxV7SBGmC65L2k-ygdDOxV8vuKN00Nk,715
+ redis_benchmarks_specification-0.1.321.dist-info/RECORD,,