redis-benchmarks-specification 0.1.320__py3-none-any.whl → 0.1.322__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.

Files changed (16)
  1. redis_benchmarks_specification/__builder__/builder.py +14 -54
  2. redis_benchmarks_specification/__cli__/args.py +1 -1
  3. redis_benchmarks_specification/__cli__/cli.py +0 -3
  4. redis_benchmarks_specification/__compare__/args.py +6 -0
  5. redis_benchmarks_specification/__compare__/compare.py +33 -0
  6. redis_benchmarks_specification/__self_contained_coordinator__/docker.py +1 -2
  7. redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py +7 -69
  8. redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-1KiB-values-pipeline-10.yml +32 -0
  9. redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-caching-string-100k-sessions.yml +98 -0
  10. {redis_benchmarks_specification-0.1.320.dist-info → redis_benchmarks_specification-0.1.322.dist-info}/METADATA +1 -1
  11. {redis_benchmarks_specification-0.1.320.dist-info → redis_benchmarks_specification-0.1.322.dist-info}/RECORD +16 -14
  12. /redis_benchmarks_specification/setups/builders/{gcc:15.2.0-amd64-debian-buster-default.yml → gcc:15.2.0-amd64-debian-bookworm-default.yml} +0 -0
  13. /redis_benchmarks_specification/setups/builders/{gcc:15.2.0-arm64-debian-buster-default.yml → gcc:15.2.0-arm64-debian-bookworm-default.yml} +0 -0
  14. {redis_benchmarks_specification-0.1.320.dist-info → redis_benchmarks_specification-0.1.322.dist-info}/LICENSE +0 -0
  15. {redis_benchmarks_specification-0.1.320.dist-info → redis_benchmarks_specification-0.1.322.dist-info}/WHEEL +0 -0
  16. {redis_benchmarks_specification-0.1.320.dist-info → redis_benchmarks_specification-0.1.322.dist-info}/entry_points.txt +0 -0
--- a/redis_benchmarks_specification/__builder__/builder.py
+++ b/redis_benchmarks_specification/__builder__/builder.py
@@ -73,18 +73,6 @@ def main():
     parser.add_argument(
         "--arch", type=str, default="amd64", help="arch to build artifacts"
     )
-    parser.add_argument(
-        "--builder-group",
-        type=str,
-        default=STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
-        help="Consumer group name to read from the stream",
-    )
-    parser.add_argument(
-        "--builder-id",
-        type=str,
-        default="1",
-        help="Consumer id to read from the stream",
-    )
     parser.add_argument(
         "--setups-folder",
         type=str,
@@ -161,14 +149,7 @@ def main():
 
     build_spec_image_prefetch(builders_folder, different_build_specs)
 
-    builder_group = args.builder_group
-    builder_id = args.builder_id
-    if builder_group is None:
-        builder_group = STREAM_GH_EVENTS_COMMIT_BUILDERS_CG
-    if builder_id is None:
-        builder_id = "1"
-
-    builder_consumer_group_create(conn, builder_group)
+    builder_consumer_group_create(conn)
     if args.github_token is not None:
         logging.info("detected a github token. will update as much as possible!!! =)")
     previous_id = args.consumer_start_id
@@ -181,26 +162,28 @@ def main():
             args.docker_air_gap,
             arch,
             args.github_token,
-            builder_group,
-            builder_id,
         )
 
 
-def builder_consumer_group_create(
-    conn, builder_group=STREAM_GH_EVENTS_COMMIT_BUILDERS_CG, id="$"
-):
+def builder_consumer_group_create(conn, id="$"):
     try:
         conn.xgroup_create(
             STREAM_KEYNAME_GH_EVENTS_COMMIT,
-            builder_group,
+            STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
             mkstream=True,
             id=id,
         )
         logging.info(
-            "Created consumer group named {} to distribute work.".format(builder_group)
+            "Created consumer group named {} to distribute work.".format(
+                STREAM_GH_EVENTS_COMMIT_BUILDERS_CG
+            )
        )
    except redis.exceptions.ResponseError:
-        logging.info("Consumer group named {} already existed.".format(builder_group))
+        logging.info(
+            "Consumer group named {} already existed.".format(
+                STREAM_GH_EVENTS_COMMIT_BUILDERS_CG
+            )
+        )
 
 
 def check_benchmark_build_comment(comments):
@@ -222,22 +205,14 @@ def builder_process_stream(
     docker_air_gap=False,
     arch="amd64",
     github_token=None,
-    builder_group=None,
-    builder_id=None,
 ):
     new_builds_count = 0
     auto_approve_github_comments = True
     build_stream_fields_arr = []
-    if builder_group is None:
-        builder_group = STREAM_GH_EVENTS_COMMIT_BUILDERS_CG
-    if builder_id is None:
-        builder_id = "1"
-    consumer_name = "{}-proc#{}".format(builder_group, builder_id)
-    logging.info(
-        f"Entering blocking read waiting for work. building for arch: {arch}. Using consumer id {consumer_name}"
-    )
+    logging.info("Entering blocking read waiting for work.")
+    consumer_name = "{}-proc#{}".format(STREAM_GH_EVENTS_COMMIT_BUILDERS_CG, "1")
     newTestInfo = conn.xreadgroup(
-        builder_group,
+        STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
         consumer_name,
         {STREAM_KEYNAME_GH_EVENTS_COMMIT: previous_id},
         count=1,
@@ -255,21 +230,6 @@
         docker_client = docker.from_env()
         from pathlib import Path
 
-        build_request_arch = None
-        if b"arch" in testDetails:
-            build_request_arch = testDetails[b"arch"].decode()
-        elif b"build_arch" in testDetails:
-            build_request_arch = testDetails[b"build_arch"].decode()
-        else:
-            logging.info("No arch info found on the stream.")
-        if build_request_arch is not None and build_request_arch != arch:
-            logging.info(
-                "skipping build request given requested build arch {}!={}".format(
-                    build_request_arch, arch
-                )
-            )
-            return previous_id, new_builds_count, build_stream_fields_arr
-
         home = str(Path.home())
         if b"git_hash" in testDetails:
             git_hash = testDetails[b"git_hash"]
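
Taken together, the builder changes drop the `--builder-group`/`--builder-id` options and the per-arch filtering of stream entries: the builder now always registers against the fixed `STREAM_GH_EVENTS_COMMIT_BUILDERS_CG` group as consumer `#1`. The Redis Streams pattern this relies on boils down to the following redis-py sketch (stream, group, and consumer names here are placeholders, not the project's actual constants):

```python
import redis

r = redis.Redis()
stream = "gh-events-commit"      # placeholder for STREAM_KEYNAME_GH_EVENTS_COMMIT
group = "builders-cg"            # placeholder for STREAM_GH_EVENTS_COMMIT_BUILDERS_CG
consumer = "{}-proc#{}".format(group, "1")

# Create the consumer group (and, via mkstream, the stream itself) exactly once;
# a ResponseError here simply means the group already exists.
try:
    r.xgroup_create(stream, group, id="$", mkstream=True)
except redis.exceptions.ResponseError:
    pass

# Block for up to 5 seconds waiting for one new work item addressed to this consumer.
work = r.xreadgroup(group, consumer, {stream: ">"}, count=1, block=5000)
```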
--- a/redis_benchmarks_specification/__cli__/args.py
+++ b/redis_benchmarks_specification/__cli__/args.py
@@ -138,7 +138,7 @@ def spec_cli_args(parser):
     parser.add_argument("--gh_repo", type=str, default="redis")
     parser.add_argument("--server_name", type=str, default=None)
     parser.add_argument("--run_image", type=str, default="redis")
-    parser.add_argument("--build_arch", type=str, default=None)
+    parser.add_argument("--build_arch", type=str, default="amd64")
     parser.add_argument("--id", type=str, default="dockerhub")
     parser.add_argument("--mnt_point", type=str, default="")
     parser.add_argument("--trigger-unstable-commits", type=bool, default=True)
--- a/redis_benchmarks_specification/__cli__/cli.py
+++ b/redis_benchmarks_specification/__cli__/cli.py
@@ -432,9 +432,6 @@ def trigger_tests_cli_command_logic(args, project_name, project_version):
     commit_dict["tests_groups_regexp"] = tests_groups_regexp
     commit_dict["github_org"] = args.gh_org
     commit_dict["github_repo"] = args.gh_repo
-    if args.build_arch is not None:
-        commit_dict["build_arch"] = args.build_arch
-        commit_dict["arch"] = args.build_arch
     if args.server_name is not None and args.server_name != "":
         commit_dict["server_name"] = args.server_name
     if args.build_artifacts != "":
--- a/redis_benchmarks_specification/__compare__/args.py
+++ b/redis_benchmarks_specification/__compare__/args.py
@@ -46,6 +46,12 @@ def create_compare_arguments(parser):
         default="",
         help="specify a test (or a comma separated list of tests) to use for comparison. If none is specified by default will use all of them.",
     )
+    parser.add_argument(
+        "--use-test-suites-folder",
+        action="store_true",
+        default=False,
+        help="Use test names from YAML files in test-suites folder instead of database",
+    )
     parser.add_argument(
         "--defaults_filename",
         type=str,
--- a/redis_benchmarks_specification/__compare__/compare.py
+++ b/redis_benchmarks_specification/__compare__/compare.py
@@ -399,6 +399,8 @@ def compare_command_logic(args, project_name, project_version):
         args.regression_str,
         args.improvement_str,
         tests_with_config,
+        args.use_test_suites_folder,
+        testsuites_folder,
     )
     total_regressions = len(regressions_list)
     total_improvements = len(improvements_list)
@@ -686,6 +688,8 @@ def compute_regression_table(
     regression_str="REGRESSION",
     improvement_str="IMPROVEMENT",
     tests_with_config={},
+    use_test_suites_folder=False,
+    test_suites_folder=None,
 ):
     START_TIME_NOW_UTC, _, _ = get_start_time_vars()
     START_TIME_LAST_MONTH_UTC = START_TIME_NOW_UTC - datetime.timedelta(days=31)
@@ -746,6 +750,10 @@
     if test != "":
         test_names = test.split(",")
         logging.info("Using test name {}".format(test_names))
+    elif use_test_suites_folder:
+        test_names = get_test_names_from_yaml_files(
+            test_suites_folder, tags_regex_string
+        )
     else:
         test_names = get_test_names_from_db(
             rts, tags_regex_string, test_names, used_key
@@ -1620,6 +1628,31 @@ def get_test_names_from_db(rts, tags_regex_string, test_names, used_key):
     return test_names
 
 
+def get_test_names_from_yaml_files(test_suites_folder, tags_regex_string):
+    """Get test names from YAML files in test-suites folder"""
+    from redis_benchmarks_specification.__common__.runner import get_benchmark_specs
+
+    # Get all YAML files
+    yaml_files = get_benchmark_specs(test_suites_folder, test="", test_regex=".*")
+
+    # Extract test names (remove path and .yml extension)
+    test_names = []
+    for yaml_file in yaml_files:
+        test_name = os.path.basename(yaml_file).replace(".yml", "")
+        # Apply regex filtering like database version
+        match_obj = re.search(tags_regex_string, test_name)
+        if match_obj is not None:
+            test_names.append(test_name)
+
+    test_names.sort()
+    logging.info(
+        "Based on test-suites folder ({}) we have {} comparison points: {}".format(
+            test_suites_folder, len(test_names), test_names
+        )
+    )
+    return test_names
+
+
 def get_line(
     baseline_v_str,
     comparison_v_str,
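
The new `--use-test-suites-folder` path derives comparison test names from the shipped YAML filenames instead of querying the time-series database. Conceptually the filtering is just basename stripping plus the same tag regex; a small self-contained illustration (the file names and regex below are examples, not an exhaustive list):

```python
import os
import re

# Example inputs: two suite files and a tag regex.
yaml_files = [
    "test-suites/memtier_benchmark-1Mkeys-load-string-with-1KiB-values-pipeline-10.yml",
    "test-suites/memtier_benchmark-playbook-session-caching-string-100k-sessions.yml",
]
tags_regex_string = ".*string.*"

test_names = sorted(
    name
    for name in (os.path.basename(path).replace(".yml", "") for path in yaml_files)
    if re.search(tags_regex_string, name)
)
# Both names match ".*string.*", so both become comparison points.
print(test_names)
```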
--- a/redis_benchmarks_specification/__self_contained_coordinator__/docker.py
+++ b/redis_benchmarks_specification/__self_contained_coordinator__/docker.py
@@ -15,7 +15,7 @@ def generate_standalone_redis_server_args(
     redis_arguments="",
     password=None,
 ):
-    added_params = ["port", "protected-mode", "dir", "requirepass", "logfile"]
+    added_params = ["port", "protected-mode", "dir", "requirepass"]
     # start redis-server
     command = [
         binary,
@@ -31,7 +31,6 @@
         logging.info("Redis server will be started with password authentication")
     if dbdir != "":
         command.extend(["--dir", dbdir])
-        command.extend(["--logfile", f"{dbdir}redis.log"])
     if configuration_parameters is not None:
         for parameter, parameter_value in configuration_parameters.items():
             if parameter not in added_params:
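
With the hard-coded `--logfile` flag removed and `"logfile"` dropped from `added_params`, the server no longer writes a log under the DB dir by default, and the filtering loop above suggests a suite's `configuration-parameters` can now set `logfile` explicitly. A minimal sketch of that loop under those assumptions (flag values and the `command.extend` body are illustrative, not the project's exact code):

```python
# Illustrative values only; mirrors the "if parameter not in added_params" filter above.
added_params = ["port", "protected-mode", "dir", "requirepass"]
configuration_parameters = {"save": '""', "logfile": "redis.log"}

command = ["redis-server", "--port", "6379"]
for parameter, parameter_value in configuration_parameters.items():
    if parameter not in added_params:
        command.extend(["--{}".format(parameter), parameter_value])

# "logfile" is no longer filtered out, so it ends up on the command line.
print(command)
```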
--- a/redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py
+++ b/redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py
@@ -107,43 +107,6 @@ from redis_benchmarks_specification.__self_contained_coordinator__.artifacts imp
 from redis_benchmarks_specification.__self_contained_coordinator__.build_info import (
     extract_build_info_from_streamdata,
 )
-
-
-def print_directory_logs(directory_path, description=""):
-    """Print all .log files in a directory for debugging purposes."""
-    if not os.path.exists(directory_path):
-        logging.warning(f"Directory {directory_path} does not exist")
-        return
-
-    logging.info(
-        f"Printing all .log files in {description} directory: {directory_path}"
-    )
-    try:
-        for root, dirs, files in os.walk(directory_path):
-            for file in files:
-                # Only process .log files
-                if not file.endswith(".log"):
-                    continue
-
-                file_path = os.path.join(root, file)
-                logging.info(f"Found log file: {file_path}")
-                try:
-                    # Try to read and print the log file content
-                    with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
-                        content = f.read()
-                        if content.strip():  # Only print non-empty files
-                            logging.info(f"Content of {file_path}:")
-                            logging.info("-" * 40)
-                            logging.info(content)
-                            logging.info("-" * 40)
-                        else:
-                            logging.info(f"Log file {file_path} is empty")
-                except Exception as e:
-                    logging.warning(f"Could not read log file {file_path}: {e}")
-    except Exception as e:
-        logging.error(f"Error walking directory {directory_path}: {e}")
-
-
 from redis_benchmarks_specification.__self_contained_coordinator__.cpuset import (
     extract_db_cpu_limit,
     generate_cpuset_cpus,
@@ -1356,18 +1319,6 @@ def process_self_contained_coordinator_stream(
 
                     print("-" * 60)
 
-                    # Print all log files in the temporary directories for debugging
-                    logging.critical(
-                        "Printing all files in temporary directories for debugging..."
-                    )
-                    try:
-                        print_directory_logs(temporary_dir, "Redis server")
-                        print_directory_logs(temporary_dir_client, "Client")
-                    except Exception as log_error:
-                        logging.error(
-                            f"Failed to print directory logs: {log_error}"
-                        )
-
                     test_result = False
                     # tear-down
                     logging.info("Tearing down setup")
@@ -1398,27 +1349,14 @@
                             )
                         )
                         pass
-
-                    # Only remove temporary directories if test passed
-                    if test_result:
-                        logging.info(
-                            "Test passed. Removing temporary dirs {} and {}".format(
-                                temporary_dir, temporary_dir_client
-                            )
-                        )
-                        shutil.rmtree(temporary_dir, ignore_errors=True)
-                        shutil.rmtree(
-                            temporary_dir_client, ignore_errors=True
-                        )
-                    else:
-                        logging.warning(
-                            "Test failed. Preserving temporary dirs for debugging: {} and {}".format(
-                                temporary_dir, temporary_dir_client
-                            )
+                    logging.info(
+                        "Removing temporary dirs {} and {}".format(
+                            temporary_dir, temporary_dir_client
                         )
-                        # Print all log files in the temporary directories for debugging
-                        print_directory_logs(temporary_dir, "Redis server")
-                        print_directory_logs(temporary_dir_client, "Client")
+                    )
+
+                    shutil.rmtree(temporary_dir, ignore_errors=True)
+                    shutil.rmtree(temporary_dir_client, ignore_errors=True)
 
                     overall_result &= test_result
 
--- /dev/null
+++ b/redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-1KiB-values-pipeline-10.yml
@@ -0,0 +1,32 @@
+version: 0.4
+name: memtier_benchmark-1Mkeys-load-string-with-1KiB-values-pipeline-10
+description: Runs memtier_benchmark, for a keyspace length of 1M keys loading STRINGs
+  in which the value has a data size of 1000 Bytes.
+dbconfig:
+  configuration-parameters:
+    save: '""'
+  check:
+    keyspacelen: 0
+  resources:
+    requests:
+      memory: 3g
+tested-commands:
+- set
+redis-topologies:
+- oss-standalone
+build-variants:
+- gcc:15.2.0-amd64-debian-bookworm-default
+- gcc:15.2.0-arm64-debian-bookworm-default
+- dockerhub
+clientconfig:
+  run_image: redislabs/memtier_benchmark:edge
+  tool: memtier_benchmark
+  arguments: '--pipeline 10 --distinct-client-seed --data-size 1000 --ratio 1:0 --key-pattern R:R --key-minimum=1 --key-maximum
+    1000000 --test-time 180 -c 50 -t 4 --hide-histogram'
+  resources:
+    requests:
+      cpus: '4'
+      memory: 2g
+tested-groups:
+- string
+priority: 17
--- /dev/null
+++ b/redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-caching-string-100k-sessions.yml
@@ -0,0 +1,98 @@
+version: 0.4
+name: memtier_benchmark-session-caching-string-100k-sessions
+description: |
+  Runs memtier_benchmark to simulate a session caching workload for a SaaS application.
+  This benchmark focuses exclusively on **string-based session storage**, where each session
+  is stored in Redis as a serialized JSON string (`session:<id>`) containing fields like
+  user ID, timestamps, device info, and metadata (total ~400–600B).
+
+  The benchmark models a typical read-heavy cache usage pattern, with an approximate
+  **read:write ratio of 80:20**, reflecting session retrievals and infrequent updates.
+
+  Command groups:
+  - Session cache reads (`GET`): ~80%
+  - Session cache writes (`SET`): ~20%
+
+  To better approximate real-world access patterns, the benchmark uses a **Zipfian key distribution**
+  (`--command-key-pattern=Z`). This simulates **skewed access** where a small subset of sessions (hot keys)
+  receives a majority of reads — a common pattern in production workloads.
+
+  While Zipfian is technically a power-law distribution, it effectively mimics **Poisson-like behavior**
+  in large-scale systems, where access frequency is uneven but statistically predictable.
+  This access skew mirrors real-life scenarios such as:
+  - Frequently accessed or "sticky" user sessions
+  - Popular user accounts or active devices
+  - Hot caches for trending or recently used resources
+
+  Using Zipfian distribution allows this benchmark to capture **contention**, **cache pressure**, and
+  **read amplification** effects that occur in real SaaS applications under load.
+
+dbconfig:
+  configuration-parameters:
+    save: '""'
+  resources:
+    requests:
+      memory: 1g
+  init_lua: |
+    local seed = 12345
+    math.randomseed(seed)
+    local now = tonumber(redis.call('TIME')[1])
+    local function rand_str(len)
+      local chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
+      local res = ''
+      for i = 1, len do
+        local idx = math.random(#chars)
+        res = res .. chars:sub(idx, idx)
+      end
+      return res
+    end
+    for i = 1, 100000 do
+      local session_id = 'session:' .. i
+      local session_data = string.format(
+        '{"userId":"user-%d","organizationId":"org-%d","role":"member","createdAt":"%d","lastAccessed":"%d","ipAddress":"192.168.1.%d","device":"device-%s","authMethod":"password","status":"active","metadata":"%s"}',
+        i, i, now - math.random(3600), now, (i % 255), rand_str(8), rand_str(200 + (i % 100))
+      )
+      redis.call('SET', session_id, session_data)
+    end
+    return 'OK'
+
+tested-groups:
+- string
+
+tested-commands:
+- get
+- set
+
+redis-topologies:
+- oss-standalone
+
+build-variants:
+- gcc:15.2.0-amd64-debian-bookworm-default
+- gcc:15.2.0-arm64-debian-bookworm-default
+- dockerhub
+
+clientconfig:
+  run_image: redislabs/memtier_benchmark:edge
+  tool: memtier_benchmark
+  arguments: >
+    --key-prefix ""
+    --key-minimum 1
+    --key-maximum 100000
+    --data-size-range=400-600
+    --pipeline=1
+    --print-percentiles=50,90,95,99
+    --run-count=1
+    --test-time=120
+    --command="GET session:__key__"
+    --command-key-pattern=Z
+    --command-ratio=90
+    --command='SET session:__key__ "{\"userId\":\"user-__key__\",\"organizationId\":\"org-__key__\",\"role\":\"member\",\"createdAt\":\"1754905396\",\"lastAccessed\":\"1754906472\",\"ipAddress\":\"192.168.1.36\",\"device\":\"device-2T8YGLbl\",\"authMethod\":\"password\",\"status\":\"active\",\"metadata\":\"wDVmiQsSe2oSEPfhhvYN6jbVxVykSCzQXmnsqCIv5MEmpslD1LMgwJcUe8Wmhvhh56dgTDhH4o3M9vYa6JyAS3Axs2zufVVPWtTsCRVbNGjMmumy7j5vIM0OuclgbHBOxtGDtCU88YBc9IP5oNYiycXXCmq5s7mWAdmRhFrmFxOy3VKlrXJz4ZrtF6KHEflGhoF1FzXtLO8Dckg3B34kdxGsnNiRjNIEtWVxeXekUSDyrxcnwiIy29Zx8SxX2g8ZUrz3sTpEwX7KQZFpIi7XoSKZywvC6I63VMNpZcAO\"}"'
+    --command-key-pattern=Z
+    --command-ratio=10
+    --hide-histogram
+  resources:
+    requests:
+      cpus: '4'
+      memory: 2g
+
+priority: 150
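
The description above leans on the Zipfian key pattern to create hot sessions. A quick way to see the skew such a pattern produces is to sample a Zipf-like distribution over the 100k keyspace; this is a rough sketch assuming numpy is available, and memtier's internal generator will differ in the details:

```python
import numpy as np

rng = np.random.default_rng(12345)
keyspace = 100_000

# Draw 1M Zipf-distributed ranks and clip anything beyond the keyspace to the last key.
ranks = np.minimum(rng.zipf(a=1.2, size=1_000_000), keyspace)

# Share of accesses landing on the 100 "hottest" session ids.
hot_share = (ranks <= 100).mean()
print(f"top-100 keys receive {hot_share:.1%} of all accesses")
```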
--- a/redis_benchmarks_specification-0.1.320.dist-info/METADATA
+++ b/redis_benchmarks_specification-0.1.322.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: redis-benchmarks-specification
-Version: 0.1.320
+Version: 0.1.322
 Summary: The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute.
 Author: filipecosta90
 Author-email: filipecosta.90@gmail.com
--- a/redis_benchmarks_specification-0.1.320.dist-info/RECORD
+++ b/redis_benchmarks_specification-0.1.322.dist-info/RECORD
@@ -4,11 +4,11 @@ redis_benchmarks_specification/__api__/api.py,sha256=k_CMICtMm1z8jY3hByaL0hIr_5v
 redis_benchmarks_specification/__api__/app.py,sha256=JzQm84DjIVdfLbDO423BJbrds6gFzMbA0syRkHE_aUU,7063
 redis_benchmarks_specification/__builder__/Readme.md,sha256=O6MV_J3OSgzW-ir2TbukP8Vhkm_LOzQJJndG1Cykqic,111
 redis_benchmarks_specification/__builder__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
-redis_benchmarks_specification/__builder__/builder.py,sha256=86DQuqf9LhPl1_bpmQK2rkACBxYBz13Wu8fsAnKkm7g,29730
+redis_benchmarks_specification/__builder__/builder.py,sha256=cK2yGtoS6Xnux7wANQkTT2rCvkz5Y5_7huQbQFAcMMk,28324
 redis_benchmarks_specification/__builder__/schema.py,sha256=1wcmyVJBcWrBvK58pghN9NCoWLCO3BzPsmdKWYfkVog,584
 redis_benchmarks_specification/__cli__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
-redis_benchmarks_specification/__cli__/args.py,sha256=C0EdJbq5F6Td6kvEkzN5ZWMhWYuizV_tGzVhkPLKEi0,7207
-redis_benchmarks_specification/__cli__/cli.py,sha256=iTjINQ-RV_q2ovq1neSoRCAggpGdeP5mX3_1aFxSScY,22001
+redis_benchmarks_specification/__cli__/args.py,sha256=uZkk1Jom9i0xJ_OpVMrIWbw_70jFo7IswLV2EtKTKEA,7210
+redis_benchmarks_specification/__cli__/cli.py,sha256=6tt0Ai-JIFEF3ykWFU2_g5ZrzKVIoyLLXUmyzYpVDF4,21843
 redis_benchmarks_specification/__cli__/stats.py,sha256=8R6fsiR00Uqa-01_Yq0PegriZkiM313KjCpDv5PhEdM,28965
 redis_benchmarks_specification/__common__/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 redis_benchmarks_specification/__common__/builder_schema.py,sha256=kfDpRIk7NkJrb5qj9jzsBhLVNO7K_W2Clumj4pxrkG8,5938
@@ -19,8 +19,8 @@ redis_benchmarks_specification/__common__/runner.py,sha256=M-o1QZVlp3thFW-55PiaW
 redis_benchmarks_specification/__common__/spec.py,sha256=D_SN48wg6NMthW_-OS1H5bydSDiuZpfd4WPPj7Vfwmc,5760
 redis_benchmarks_specification/__common__/timeseries.py,sha256=uvS3T2zdrSmW_B2S0MYTekJfHUllqU3RlD0LrF957RQ,52904
 redis_benchmarks_specification/__compare__/__init__.py,sha256=DtBXRp0Q01XgCFmY-1OIePMyyYihVNAjZ1Y8zwqSDN0,101
-redis_benchmarks_specification/__compare__/args.py,sha256=f3ZSs8GzyIzaMzX2h9cx0nOrnlO4aXToO1SBzBlpzKM,7608
-redis_benchmarks_specification/__compare__/compare.py,sha256=OrpCpY66rlbP5so6aYCdSF9Sy3sdhKrnzVJK1u3XQno,62912
+redis_benchmarks_specification/__compare__/args.py,sha256=-3lPYzvLPiDsx1oW9KmuafxZWzA4hhbhYsBAgiGtD_w,7816
+redis_benchmarks_specification/__compare__/compare.py,sha256=uHlnjnF2kkiPIBDIJIw1inEnfUQX0OiABPfrJkp1bgo,64111
 redis_benchmarks_specification/__init__.py,sha256=YQIEx2sLPPA0JR9OuCuMNMNtm-f_gqDKgzvNJnkGNKY,491
 redis_benchmarks_specification/__runner__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
 redis_benchmarks_specification/__runner__/args.py,sha256=-el2RttOjjc4Y9yOM1P5y9BwIkBPp_Y1k7OsP91P2BI,10651
@@ -32,11 +32,11 @@ redis_benchmarks_specification/__self_contained_coordinator__/artifacts.py,sha25
 redis_benchmarks_specification/__self_contained_coordinator__/build_info.py,sha256=vlg8H8Rxu2falW8xp1GvL1SV1fyBguSbz6Apxc7A2yM,2282
 redis_benchmarks_specification/__self_contained_coordinator__/clients.py,sha256=EL1V4-i-tTav1mcF_CUosqPF3Q1qi9BZL0zFajEk70c,1878
 redis_benchmarks_specification/__self_contained_coordinator__/cpuset.py,sha256=sRvtoJIitppcOpm3R5LbVmSfPEAqPumOqVATnF5Wbek,594
-redis_benchmarks_specification/__self_contained_coordinator__/docker.py,sha256=09SyAfqlzs1KG9ZAajClNWtiNk4Jqzd--4-m3n1rLjU,3156
+redis_benchmarks_specification/__self_contained_coordinator__/docker.py,sha256=eXJM2FybaVNTjvTrKwHextcNmkCIK9HQaG8ZNWjgx18,3086
 redis_benchmarks_specification/__self_contained_coordinator__/post_processing.py,sha256=sVLKNnWdAqYY9DjVdqRC5tDaIrVSaI3Ca7w8-DQ-LRM,776
 redis_benchmarks_specification/__self_contained_coordinator__/prepopulation.py,sha256=1UeFr2T1ZQBcHCSd4W1ZtaWgXyFPfjLyDi_DgDc1eTA,2957
 redis_benchmarks_specification/__self_contained_coordinator__/runners.py,sha256=noRHn9leTfEm2fa1yHBHQd8TUGhFDoU86QQkHABnWSs,30073
-redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=QcQwPWvhS5C96NNkZFzi0xwMzwLzl6kxYmS1sWTU7s0,82532
+redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=hlSBMa-n6byXW7zVxq5nzqEKN34DrPpcgN-NnzGk-_c,79375
 redis_benchmarks_specification/__setups__/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 redis_benchmarks_specification/__setups__/topologies.py,sha256=xQ1IJkcTji_ZjLiJd3vOxZpvbNtBLZw9cPkw5hGJKHU,481
 redis_benchmarks_specification/__spec__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
@@ -47,8 +47,8 @@ redis_benchmarks_specification/__watchdog__/args.py,sha256=azW3WkS9uqQJthtZt7TPG
 redis_benchmarks_specification/__watchdog__/watchdog.py,sha256=MASAPSusxEOWCf_iVW4xIwwgFm_snLHJaI2XRiMlZhs,5832
 redis_benchmarks_specification/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 redis_benchmarks_specification/commands/commands.py,sha256=hJbKkGzAFt_l40fJyQLfBKY_zgCp-1j-siUFc6fQ71c,450
-redis_benchmarks_specification/setups/builders/gcc:15.2.0-amd64-debian-buster-default.yml,sha256=UobsjPRRQALKNvAkOqvYJJs8HLrlG9AbfJwuIkLpwHU,528
-redis_benchmarks_specification/setups/builders/gcc:15.2.0-arm64-debian-buster-default.yml,sha256=zexg-qwlrdjNEsJDigcwQgm-CluwtrWHPygvXzv0wwo,528
+redis_benchmarks_specification/setups/builders/gcc:15.2.0-amd64-debian-bookworm-default.yml,sha256=UobsjPRRQALKNvAkOqvYJJs8HLrlG9AbfJwuIkLpwHU,528
+redis_benchmarks_specification/setups/builders/gcc:15.2.0-arm64-debian-bookworm-default.yml,sha256=zexg-qwlrdjNEsJDigcwQgm-CluwtrWHPygvXzv0wwo,528
 redis_benchmarks_specification/setups/platforms/aws-ec2-1node-c5.4xlarge.yml,sha256=l7HsjccpebwZXeutnt3SHSETw4iiRwQ9dCDXLOySSRQ,622
 redis_benchmarks_specification/setups/topologies/topologies.yml,sha256=N2UOKA8tG_pLpaSFtn7WdUmDNYwxRyTv9Ln_PCOPTco,3261
 redis_benchmarks_specification/test-suites/defaults.yml,sha256=EJHv9INdjoNVMOgHY8qo4IVCHfvXVz5sv7Vxtr3DAIE,1392
@@ -119,6 +119,7 @@ redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-
 redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-10B-values-pipeline-50.yml,sha256=TaC4cWYPGiPIKoxRRJzCnnrsATEAR4V4P_k60969438,897
 redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-10B-values-pipeline-500.yml,sha256=oMqOV5SsCsmB2DdcxdpOQOx_Ea3osGI6sUnz51Kv0W8,899
 redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-10B-values.yml,sha256=brWaqyfx3DuYmGrzxhZg78gnUUDxr_9dcaNg5X97xdU,838
+redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-1KiB-values-pipeline-10.yml,sha256=geMNHCghhORcc9eOugIVy1xEjyzJ1qXDlLw9Fi-nXZs,888
 redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-1KiB-values.yml,sha256=VxKiUTOjCLt2bcSA7iAf-44du9ttK8D3Qf_67S5eRtY,843
 redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-20KiB-values.yml,sha256=fU58-OT7Tg444tKeXYL0ni7i44KfBmcvEGf3OQ5EoMU,898
 redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-zset-listpack-with-100-elements-double-score.yml,sha256=ILqf3SY5VJkRs_8mur_torHdbVLCsYaZcwzReBwsmfg,6487
@@ -272,12 +273,13 @@ redis_benchmarks_specification/test-suites/memtier_benchmark-nokeys-pubsub-mixed
 redis_benchmarks_specification/test-suites/memtier_benchmark-nokeys-pubsub-publish-1K-channels-10B-no-subscribers.yml,sha256=8M9AdpNaVBuSUm78cqKModOF_xITV_RM7RLCTyvtvaQ,825
 redis_benchmarks_specification/test-suites/memtier_benchmark-nokeys-server-time-pipeline-10.yml,sha256=zeTHtpbhNGfzTXHfr9P5e62PebxUONRvTsCbXJZhsTs,721
 redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-caching-hash-100k-sessions.yml,sha256=H98DrXlq-lRhoe1M7vehfDBbUdZ7WwWroriTpklx-PI,3646
+redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-caching-string-100k-sessions.yml,sha256=yVrHQxMp2xzSpZ5Vx7GCTqhwbjMB-RoBf1T20qz9puE,3909
 redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-storage-100k-sessions.yml,sha256=GCNREyvwGlhMDmrvOaRWi1w0GqG9YA-8TRoh1xIa4xw,7012
 redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-storage-1k-sessions.yml,sha256=2egtIxPxCze2jlbAfgsk4v9JSQHNMoPLbDWFEW8olDg,7006
 redis_benchmarks_specification/test-suites/template.txt,sha256=ezqGiRPOvuSDO0iG7GEf-AGXNfHbgXI89_G0RUEzL88,481
 redis_benchmarks_specification/vector-search-test-suites/vector_db_benchmark_test.yml,sha256=PD7ow-k4Ll2BkhEC3aIqiaCZt8Hc4aJIp96Lw3J3mcI,791
-redis_benchmarks_specification-0.1.320.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-redis_benchmarks_specification-0.1.320.dist-info/METADATA,sha256=dsa2e5C8eW97s-VDqOhHSoAX-I6U4qcqVBZ62Wmx-8M,22726
-redis_benchmarks_specification-0.1.320.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-redis_benchmarks_specification-0.1.320.dist-info/entry_points.txt,sha256=x5WBXCZsnDRTZxV7SBGmC65L2k-ygdDOxV8vuKN00Nk,715
-redis_benchmarks_specification-0.1.320.dist-info/RECORD,,
+redis_benchmarks_specification-0.1.322.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+redis_benchmarks_specification-0.1.322.dist-info/METADATA,sha256=IJ8st5rZHb_Jz7k27HlFYeDiiNfowbXUzdhYCwcjCW4,22726
+redis_benchmarks_specification-0.1.322.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+redis_benchmarks_specification-0.1.322.dist-info/entry_points.txt,sha256=x5WBXCZsnDRTZxV7SBGmC65L2k-ygdDOxV8vuKN00Nk,715
+redis_benchmarks_specification-0.1.322.dist-info/RECORD,,