redis-benchmarks-specification 0.1.340__py3-none-any.whl → 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of redis-benchmarks-specification might be problematic.
- redis_benchmarks_specification/__builder__/builder.py +38 -2
- redis_benchmarks_specification/__cli__/args.py +1 -1
- redis_benchmarks_specification/__cli__/cli.py +10 -6
- redis_benchmarks_specification/__common__/env.py +14 -0
- redis_benchmarks_specification/__self_contained_coordinator__/runners.py +23 -8
- redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py +18 -7
- {redis_benchmarks_specification-0.1.340.dist-info → redis_benchmarks_specification-0.2.1.dist-info}/METADATA +1 -1
- {redis_benchmarks_specification-0.1.340.dist-info → redis_benchmarks_specification-0.2.1.dist-info}/RECORD +11 -11
- {redis_benchmarks_specification-0.1.340.dist-info → redis_benchmarks_specification-0.2.1.dist-info}/LICENSE +0 -0
- {redis_benchmarks_specification-0.1.340.dist-info → redis_benchmarks_specification-0.2.1.dist-info}/WHEEL +0 -0
- {redis_benchmarks_specification-0.1.340.dist-info → redis_benchmarks_specification-0.2.1.dist-info}/entry_points.txt +0 -0
redis_benchmarks_specification/__builder__/builder.py

@@ -28,6 +28,7 @@ from redis_benchmarks_specification.__common__.env import (
     SPECS_PATH_SETUPS,
     STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
     STREAM_KEYNAME_NEW_BUILD_EVENTS,
+    get_arch_specific_stream_name,
     REDIS_HEALTH_CHECK_INTERVAL,
     REDIS_SOCKET_TIMEOUT,
     REDIS_BINS_EXPIRE_SECS,
@@ -371,6 +372,11 @@ def builder_process_stream(
            )
        )
        return previous_id, new_builds_count, build_stream_fields_arr
+    else:
+        logging.info(
+            "No arch info found on the stream. Using default arch {}.".format(arch)
+        )
+        build_request_arch = arch

     home = str(Path.home())
     if b"git_hash" in testDetails:
@@ -532,7 +538,7 @@ def builder_process_stream(
     server_name = testDetails[b"server_name"].decode()

     # Check if artifacts already exist before building
-    prefix = f"github_org={github_org}/github_repo={github_repo}/git_branch={str(git_branch)}/git_version={str(git_version)}/git_hash={str(git_hash)}"
+    prefix = f"build_spec={build_spec}/github_org={github_org}/github_repo={github_repo}/git_branch={str(git_branch)}/git_version={str(git_version)}/git_hash={str(git_hash)}"

     # Create a comprehensive build signature that includes all build-affecting parameters
     import hashlib
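The prefix change above is what lets the artifact-reuse check distinguish otherwise identical commits built under different build specifications. A minimal sketch of the effect, using hypothetical org/branch/hash values and hypothetical build-spec names:

# Hypothetical values: same commit, two different build specs.
github_org, github_repo = "redis", "redis"
git_branch, git_version, git_hash = "unstable", "8.1.0", "abc1234"

for build_spec in ("gcc:12-amd64-bookworm", "clang:16-arm64-bookworm"):
    prefix = (
        f"build_spec={build_spec}/github_org={github_org}/github_repo={github_repo}/"
        f"git_branch={git_branch}/git_version={git_version}/git_hash={git_hash}"
    )
    # Before this release the prefix started at github_org=..., so both specs
    # resolved to the same key prefix and could wrongly reuse each other's binaries.
    print(prefix)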
@@ -598,6 +604,32 @@ def builder_process_stream(
                 github_repo,
                 artifact_keys,  # Pass existing artifact keys
             )
+            # Add to benchmark stream even when reusing artifacts
+            if result is True:
+                arch_specific_stream = get_arch_specific_stream_name(build_arch)
+                logging.info(
+                    f"Adding reused build work to architecture-specific stream: {arch_specific_stream}"
+                )
+                benchmark_stream_id = conn.xadd(
+                    arch_specific_stream, build_stream_fields
+                )
+                logging.info(
+                    "successfully reused build variant {} for redis git_sha {}. Stream id: {}".format(
+                        id, git_hash, benchmark_stream_id
+                    )
+                )
+                streamId_decoded = streamId.decode()
+                benchmark_stream_id_decoded = benchmark_stream_id.decode()
+                builder_list_completed = (
+                    f"builder:{streamId_decoded}:builds_completed"
+                )
+                conn.lpush(builder_list_completed, benchmark_stream_id_decoded)
+                conn.expire(builder_list_completed, REDIS_BINS_EXPIRE_SECS)
+                logging.info(
+                    f"Adding information of build->benchmark stream info in list {builder_list_completed}. Adding benchmark stream id: {benchmark_stream_id_decoded}"
+                )
+                build_stream_fields_arr.append(build_stream_fields)
+                new_builds_count = new_builds_count + 1
             continue  # Skip to next build spec

             logging.info(
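When artifacts are reused, the builder now performs the same bookkeeping as a fresh build: it pushes the work item to the architecture-specific benchmark stream and records the resulting entry id against the originating builder event. A minimal sketch of that pattern with redis-py, assuming a local Redis, a hypothetical payload, a hypothetical originating event id, and an assumed expiry constant:

import redis

conn = redis.Redis()
REDIS_BINS_EXPIRE_SECS = 60 * 60 * 24  # assumption: bookkeeping expires with the binaries

build_stream_fields = {"git_hash": "abc1234", "arch": "amd64"}  # hypothetical payload
benchmark_stream_id = conn.xadd("oss:api:gh/redis/redis/builds:amd64", build_stream_fields)

# Record which benchmark-stream entry a given builder event produced, so callers can
# follow the build -> benchmark chain; the list expires together with the binaries.
stream_id = "1700000000000-0"  # hypothetical id of the originating gh-events entry
builder_list_completed = f"builder:{stream_id}:builds_completed"
conn.lpush(builder_list_completed, benchmark_stream_id)
conn.expire(builder_list_completed, REDIS_BINS_EXPIRE_SECS)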
@@ -685,8 +717,12 @@ def builder_process_stream(
                 None,  # existing_artifact_keys - None for new builds
             )
             if result is True:
+                arch_specific_stream = get_arch_specific_stream_name(build_arch)
+                logging.info(
+                    f"Adding new build work to architecture-specific stream: {arch_specific_stream}"
+                )
                 benchmark_stream_id = conn.xadd(
-
+                    arch_specific_stream, build_stream_fields
                 )
                 logging.info(
                     "sucessfully built build variant {} for redis git_sha {}. Stream id: {}".format(
redis_benchmarks_specification/__cli__/args.py

@@ -138,7 +138,7 @@ def spec_cli_args(parser):
     parser.add_argument("--gh_repo", type=str, default="redis")
     parser.add_argument("--server_name", type=str, default=None)
     parser.add_argument("--run_image", type=str, default="redis")
-    parser.add_argument("--
+    parser.add_argument("--arch", type=str, default="amd64")
     parser.add_argument("--id", type=str, default="dockerhub")
     parser.add_argument("--mnt_point", type=str, default="")
     parser.add_argument("--trigger-unstable-commits", type=bool, default=True)
redis_benchmarks_specification/__cli__/cli.py

@@ -44,6 +44,7 @@ from redis_benchmarks_specification.__common__.env import (
     STREAM_KEYNAME_GH_EVENTS_COMMIT,
     STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
     STREAM_KEYNAME_NEW_BUILD_EVENTS,
+    get_arch_specific_stream_name,
 )
 from redis_benchmarks_specification.__common__.package import (
     get_version_string,
@@ -84,7 +85,7 @@ def trigger_tests_dockerhub_cli_command_logic(args, project_name, project_version):
         args.id,
         conn,
         args.run_image,
-        args.
+        args.arch,
         testDetails,
         "n/a",
         [],
@@ -124,9 +125,12 @@ def trigger_tests_dockerhub_cli_command_logic(args, project_name, project_version):
         store_airgap_image_redis(conn, docker_client, args.run_image)

     if result is True:
-
-
+        # Use architecture-specific stream
+        arch_specific_stream = get_arch_specific_stream_name(args.arch)
+        logging.info(
+            f"CLI adding work to architecture-specific stream: {arch_specific_stream}"
         )
+        benchmark_stream_id = conn.xadd(arch_specific_stream, build_stream_fields)
         logging.info(
             "sucessfully requested a new run {}. Stream id: {}".format(
                 build_stream_fields, benchmark_stream_id
@@ -438,9 +442,9 @@ def trigger_tests_cli_command_logic(args, project_name, project_version):
     commit_dict["tests_groups_regexp"] = tests_groups_regexp
     commit_dict["github_org"] = args.gh_org
     commit_dict["github_repo"] = args.gh_repo
-    if args.
-        commit_dict["build_arch"] = args.
-        commit_dict["arch"] = args.
+    if args.arch is not None:
+        commit_dict["build_arch"] = args.arch
+        commit_dict["arch"] = args.arch
     if args.server_name is not None and args.server_name != "":
         commit_dict["server_name"] = args.server_name
     if args.build_artifacts != "":
redis_benchmarks_specification/__common__/env.py

@@ -32,6 +32,20 @@ STREAM_KEYNAME_NEW_BUILD_EVENTS = os.getenv(
     "STREAM_KEYNAME_NEW_BUILD_EVENTS", "oss:api:gh/redis/redis/builds"
 )

+
+# Function to get architecture-specific build events stream name
+def get_arch_specific_stream_name(arch):
+    """Get architecture-specific stream name for build events"""
+    base_stream = STREAM_KEYNAME_NEW_BUILD_EVENTS
+    if arch in ["amd64", "x86_64"]:
+        return f"{base_stream}:amd64"
+    elif arch in ["arm64", "aarch64"]:
+        return f"{base_stream}:arm64"
+    else:
+        # Fallback to base stream for unknown architectures
+        return base_stream
+
+
 STREAM_GH_NEW_BUILD_RUNNERS_CG = os.getenv(
     "STREAM_GH_NEW_BUILD_RUNNERS_CG", "runners-cg:redis/redis/commits"
 )
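get_arch_specific_stream_name simply suffixes the base build-events stream, normalising the common aliases for each architecture and falling back to the shared stream for anything it does not recognise. A small usage sketch; the base name shown is the documented default ("oss:api:gh/redis/redis/builds"), and "riscv64" is just a hypothetical unknown architecture:

from redis_benchmarks_specification.__common__.env import get_arch_specific_stream_name

for arch in ("amd64", "x86_64", "arm64", "aarch64", "riscv64"):
    print(arch, "->", get_arch_specific_stream_name(arch))

# amd64   -> oss:api:gh/redis/redis/builds:amd64
# x86_64  -> oss:api:gh/redis/redis/builds:amd64
# arm64   -> oss:api:gh/redis/redis/builds:arm64
# aarch64 -> oss:api:gh/redis/redis/builds:arm64
# riscv64 -> oss:api:gh/redis/redis/builds   (fallback to the base stream)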
redis_benchmarks_specification/__self_contained_coordinator__/runners.py

@@ -38,6 +38,7 @@ from redisbench_admin.utils.results import post_process_benchmark_results

 from redis_benchmarks_specification.__common__.env import (
     STREAM_KEYNAME_NEW_BUILD_EVENTS,
+    get_arch_specific_stream_name,
     STREAM_GH_NEW_BUILD_RUNNERS_CG,
     S3_BUCKET_NAME,
 )
@@ -71,12 +72,16 @@ from redis_benchmarks_specification.__self_contained_coordinator__.prepopulation
 )


-def build_runners_consumer_group_create(conn, running_platform, id="$"):
+def build_runners_consumer_group_create(conn, running_platform, arch="amd64", id="$"):
     consumer_group_name = get_runners_consumer_group_name(running_platform)
+    arch_specific_stream = get_arch_specific_stream_name(arch)
     logging.info("Will use consumer group named {}.".format(consumer_group_name))
+    logging.info(
+        "Will read from architecture-specific stream: {}.".format(arch_specific_stream)
+    )
     try:
         conn.xgroup_create(
-
+            arch_specific_stream,
             consumer_group_name,
             mkstream=True,
             id=id,
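Because each architecture now has its own stream, the coordinator creates its consumer group on that stream with mkstream=True, so for example an arm64 runner can start before any arm64 build has ever been published. A standalone sketch of the same idiom with redis-py, assuming a local Redis; the stream and group names below are illustrative (the actual group name also encodes the running platform):

import logging
import redis

conn = redis.Redis()
stream = "oss:api:gh/redis/redis/builds:arm64"  # arch-specific stream
group = "runners-cg:redis/redis/commits"        # illustrative consumer group name

try:
    # mkstream=True creates the (still empty) stream together with the group.
    conn.xgroup_create(stream, group, id="$", mkstream=True)
except redis.exceptions.ResponseError as e:
    # A second coordinator pointed at the same stream gets BUSYGROUP; that is fine.
    if "BUSYGROUP" not in str(e):
        raise
    logging.info("Consumer group %s already exists on %s", group, stream)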
@@ -99,17 +104,23 @@ def get_runners_consumer_group_name(running_platform):
     return consumer_group_name


-def clear_pending_messages_for_consumer(
+def clear_pending_messages_for_consumer(
+    conn, running_platform, consumer_pos, arch="amd64"
+):
     """Clear all pending messages for a specific consumer on startup"""
     consumer_group_name = get_runners_consumer_group_name(running_platform)
     consumer_name = "{}-self-contained-proc#{}".format(
         consumer_group_name, consumer_pos
     )
+    arch_specific_stream = get_arch_specific_stream_name(arch)
+    logging.info(
+        f"Clearing pending messages from architecture-specific stream: {arch_specific_stream}"
+    )

     try:
         # Get pending messages for this specific consumer
         pending_info = conn.xpending_range(
-
+            arch_specific_stream,
             consumer_group_name,
             min="-",
             max="+",
@@ -125,7 +136,7 @@ def clear_pending_messages_for_consumer(conn, running_platform, consumer_pos):

         # Acknowledge all pending messages to clear them
         ack_count = conn.xack(
-
+            arch_specific_stream, consumer_group_name, *message_ids
         )

         logging.info(
@@ -143,15 +154,19 @@ def clear_pending_messages_for_consumer(conn, running_platform, consumer_pos):
         logging.error(f"Unexpected error clearing pending messages: {e}")


-def reset_consumer_group_to_latest(conn, running_platform):
+def reset_consumer_group_to_latest(conn, running_platform, arch="amd64"):
     """Reset the consumer group position to only read new messages (skip old ones)"""
     consumer_group_name = get_runners_consumer_group_name(running_platform)
+    arch_specific_stream = get_arch_specific_stream_name(arch)
+    logging.info(
+        f"Resetting consumer group position for architecture-specific stream: {arch_specific_stream}"
+    )

     try:
         # Set the consumer group position to '$' (latest) to skip all existing messages
-        conn.xgroup_setid(
+        conn.xgroup_setid(arch_specific_stream, consumer_group_name, id="$")
         logging.info(
-            f"Reset consumer group {consumer_group_name} position to latest - will only process new messages"
+            f"Reset consumer group {consumer_group_name} position to latest on stream {arch_specific_stream} - will only process new messages"
         )

     except redis.exceptions.ResponseError as e:
redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py

@@ -102,6 +102,7 @@ from redisbench_admin.utils.results import post_process_benchmark_results

 from redis_benchmarks_specification.__common__.env import (
     STREAM_KEYNAME_NEW_BUILD_EVENTS,
+    get_arch_specific_stream_name,
     S3_BUCKET_NAME,
 )
 from redis_benchmarks_specification.__common__.spec import (
@@ -650,7 +651,7 @@ def main():

     logging.info("checking build spec requirements")
     running_platform = args.platform_name
-    build_runners_consumer_group_create(gh_event_conn, running_platform)
+    build_runners_consumer_group_create(gh_event_conn, running_platform, args.arch)

     # Clear pending messages and reset consumer group position by default (unless explicitly skipped)
     if not args.skip_clear_pending_on_startup:
@@ -659,9 +660,9 @@ def main():
             "Clearing pending messages and resetting consumer group position on startup (default behavior)"
         )
         clear_pending_messages_for_consumer(
-            gh_event_conn, running_platform, consumer_pos
+            gh_event_conn, running_platform, consumer_pos, args.arch
         )
-        reset_consumer_group_to_latest(gh_event_conn, running_platform)
+        reset_consumer_group_to_latest(gh_event_conn, running_platform, args.arch)
     else:
         logging.info(
             "Skipping pending message cleanup and consumer group reset as requested"
@@ -819,10 +820,15 @@ def self_contained_coordinator_blocking_read(
             get_runners_consumer_group_name(platform_name), consumer_name
         )
     )
+    # Use architecture-specific stream
+    arch_specific_stream = get_arch_specific_stream_name(arch)
+    logging.info(
+        f"Reading work from architecture-specific stream: {arch_specific_stream}"
+    )
     newTestInfo = github_event_conn.xreadgroup(
         get_runners_consumer_group_name(platform_name),
         consumer_name,
-        {
+        {arch_specific_stream: stream_id},
         count=1,
         block=0,
     )
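The blocking read side mirrors the producer: the consumer asks its group for one entry from the architecture-specific stream and, once processed (or filtered out), acknowledges it on that same stream. A condensed sketch of the read/ack cycle, assuming a local Redis; group and consumer names follow the patterns visible above but the exact values are illustrative:

import redis

conn = redis.Redis()
stream = "oss:api:gh/redis/redis/builds:amd64"
group = "runners-cg:redis/redis/commits"
consumer = "runners-cg:redis/redis/commits-self-contained-proc#1"

# Block until one new work item arrives on the arch-specific stream (">" = new messages).
reply = conn.xreadgroup(group, consumer, {stream: ">"}, count=1, block=0)
if reply:
    _, entries = reply[0]
    stream_id, fields = entries[0]
    # ... run the benchmark described by `fields` ...
    conn.xack(stream, group, stream_id)  # ack on the same arch-specific stream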
@@ -874,8 +880,9 @@ def self_contained_coordinator_blocking_read(
     num_process_test_suites = num_process_test_suites + total_test_suite_runs

     # Always acknowledge the message, even if it was filtered out
+    arch_specific_stream = get_arch_specific_stream_name(arch)
     ack_reply = github_event_conn.xack(
-
+        arch_specific_stream,
         get_runners_consumer_group_name(platform_name),
         stream_id,
     )
@@ -1127,11 +1134,15 @@ def process_self_contained_coordinator_stream(
         skip_test = False
         if b"platform" in testDetails:
             platform = testDetails[b"platform"]
-
+            # Decode bytes to string for proper comparison
+            platform_str = (
+                platform.decode() if isinstance(platform, bytes) else platform
+            )
+            if running_platform != platform_str:
                 skip_test = True
                 logging.info(
                     "skipping stream_id {} given plaform {}!={}".format(
-                        stream_id, running_platform,
+                        stream_id, running_platform, platform_str
                     )
                 )

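The platform fix above matters because redis-py returns stream fields as bytes by default, so comparing the coordinator's str argument against the raw bytes value always reports a mismatch. A short illustration of the pitfall and the decode guard (the platform name is hypothetical):

platform = b"intel64-ubuntu22.04"         # as returned by xreadgroup (bytes)
running_platform = "intel64-ubuntu22.04"  # CLI argument (str)

print(running_platform != platform)  # True: str vs bytes never compare equal

platform_str = platform.decode() if isinstance(platform, bytes) else platform
print(running_platform != platform_str)  # False: values match once decoded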
{redis_benchmarks_specification-0.1.340.dist-info → redis_benchmarks_specification-0.2.1.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: redis-benchmarks-specification
-Version: 0.1.340
+Version: 0.2.1
 Summary: The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute.
 Author: filipecosta90
 Author-email: filipecosta.90@gmail.com
{redis_benchmarks_specification-0.1.340.dist-info → redis_benchmarks_specification-0.2.1.dist-info}/RECORD

@@ -4,15 +4,15 @@ redis_benchmarks_specification/__api__/api.py,sha256=k_CMICtMm1z8jY3hByaL0hIr_5v
 redis_benchmarks_specification/__api__/app.py,sha256=JzQm84DjIVdfLbDO423BJbrds6gFzMbA0syRkHE_aUU,7063
 redis_benchmarks_specification/__builder__/Readme.md,sha256=O6MV_J3OSgzW-ir2TbukP8Vhkm_LOzQJJndG1Cykqic,111
 redis_benchmarks_specification/__builder__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
-redis_benchmarks_specification/__builder__/builder.py,sha256=
+redis_benchmarks_specification/__builder__/builder.py,sha256=vG6Cp0SAAgvZt9zyjTQB_-mSUGbPBbM3Ue9hJpb7oYM,40984
 redis_benchmarks_specification/__builder__/schema.py,sha256=1wcmyVJBcWrBvK58pghN9NCoWLCO3BzPsmdKWYfkVog,584
 redis_benchmarks_specification/__cli__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
-redis_benchmarks_specification/__cli__/args.py,sha256=
-redis_benchmarks_specification/__cli__/cli.py,sha256=
+redis_benchmarks_specification/__cli__/args.py,sha256=y_FHtkjhRKsU532sutlXPeXgtFZu7KGOp7lTSi1C5uc,7427
+redis_benchmarks_specification/__cli__/cli.py,sha256=0S3Lwci-oxpPJMekPIFeG5XJiSF53rpa5sRnS-aAZnA,22416
 redis_benchmarks_specification/__cli__/stats.py,sha256=r9JIfwGCSR3maozYbDZfZrkthNFQSs0xIymS86yZ6Iw,55574
 redis_benchmarks_specification/__common__/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 redis_benchmarks_specification/__common__/builder_schema.py,sha256=kfDpRIk7NkJrb5qj9jzsBhLVNO7K_W2Clumj4pxrkG8,5938
-redis_benchmarks_specification/__common__/env.py,sha256=
+redis_benchmarks_specification/__common__/env.py,sha256=5E5LS8o7_wISw-gX11bcMJx1A9DHo0j3cbV383lMFeo,3591
 redis_benchmarks_specification/__common__/github.py,sha256=9TZtnISsSgXTSAN_VQejo5YRPDPhlU0gjxgKGPw_sP8,10699
 redis_benchmarks_specification/__common__/package.py,sha256=4uVt1BAZ999LV2rZkq--Tk6otAVIf9YR3g3KGeUpiW4,834
 redis_benchmarks_specification/__common__/runner.py,sha256=TKMUFJ3nLSfmSU7P_ok9oM5-pI4L4tFxsWLUWaUHhbI,16733
@@ -36,8 +36,8 @@ redis_benchmarks_specification/__self_contained_coordinator__/cpuset.py,sha256=s
 redis_benchmarks_specification/__self_contained_coordinator__/docker.py,sha256=09SyAfqlzs1KG9ZAajClNWtiNk4Jqzd--4-m3n1rLjU,3156
 redis_benchmarks_specification/__self_contained_coordinator__/post_processing.py,sha256=sVLKNnWdAqYY9DjVdqRC5tDaIrVSaI3Ca7w8-DQ-LRM,776
 redis_benchmarks_specification/__self_contained_coordinator__/prepopulation.py,sha256=1UeFr2T1ZQBcHCSd4W1ZtaWgXyFPfjLyDi_DgDc1eTA,2957
-redis_benchmarks_specification/__self_contained_coordinator__/runners.py,sha256=
-redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=
+redis_benchmarks_specification/__self_contained_coordinator__/runners.py,sha256=1SvzQI8tCh5dBvd87PMpwEgycyhEcbr7NviqTxXl65I,33444
+redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=CY9Ame9I2zwOyOfZTlQst7sxiCI8EWVSOjIGxxpRQIc,113591
 redis_benchmarks_specification/__setups__/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 redis_benchmarks_specification/__setups__/topologies.py,sha256=xQ1IJkcTji_ZjLiJd3vOxZpvbNtBLZw9cPkw5hGJKHU,481
 redis_benchmarks_specification/__spec__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
@@ -282,8 +282,8 @@ redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-st
 redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-storage-1k-sessions.yml,sha256=2egtIxPxCze2jlbAfgsk4v9JSQHNMoPLbDWFEW8olDg,7006
 redis_benchmarks_specification/test-suites/template.txt,sha256=ezqGiRPOvuSDO0iG7GEf-AGXNfHbgXI89_G0RUEzL88,481
 redis_benchmarks_specification/vector-search-test-suites/vector_db_benchmark_test.yml,sha256=PD7ow-k4Ll2BkhEC3aIqiaCZt8Hc4aJIp96Lw3J3mcI,791
-redis_benchmarks_specification-0.1.
-redis_benchmarks_specification-0.1.
-redis_benchmarks_specification-0.1.
-redis_benchmarks_specification-0.1.
-redis_benchmarks_specification-0.1.
+redis_benchmarks_specification-0.2.1.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+redis_benchmarks_specification-0.2.1.dist-info/METADATA,sha256=D38N0_-5BdXiT6cyQTccPFaHEiI1OYGQ1Y9T8m4nfUM,22766
+redis_benchmarks_specification-0.2.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+redis_benchmarks_specification-0.2.1.dist-info/entry_points.txt,sha256=x5WBXCZsnDRTZxV7SBGmC65L2k-ygdDOxV8vuKN00Nk,715
+redis_benchmarks_specification-0.2.1.dist-info/RECORD,,
Files without changes: LICENSE, WHEEL, entry_points.txt (dist-info directory renamed from 0.1.340 to 0.2.1).