redis-benchmarks-specification 0.1.338__py3-none-any.whl → 0.1.340__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of redis-benchmarks-specification might be problematic.
- redis_benchmarks_specification/__builder__/builder.py +221 -15
- redis_benchmarks_specification/__cli__/cli.py +6 -0
- redis_benchmarks_specification/__self_contained_coordinator__/runners.py +1 -5
- redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py +27 -15
- {redis_benchmarks_specification-0.1.338.dist-info → redis_benchmarks_specification-0.1.340.dist-info}/METADATA +1 -1
- {redis_benchmarks_specification-0.1.338.dist-info → redis_benchmarks_specification-0.1.340.dist-info}/RECORD +9 -9
- {redis_benchmarks_specification-0.1.338.dist-info → redis_benchmarks_specification-0.1.340.dist-info}/LICENSE +0 -0
- {redis_benchmarks_specification-0.1.338.dist-info → redis_benchmarks_specification-0.1.340.dist-info}/WHEEL +0 -0
- {redis_benchmarks_specification-0.1.338.dist-info → redis_benchmarks_specification-0.1.340.dist-info}/entry_points.txt +0 -0
redis_benchmarks_specification/__builder__/builder.py

@@ -47,6 +47,69 @@ from redis_benchmarks_specification.__common__.package import (
 PERFORMANCE_GH_TOKEN = os.getenv("PERFORMANCE_GH_TOKEN", None)


+def clear_pending_messages_for_builder_consumer(conn, builder_group, builder_id):
+    """Clear all pending messages for a specific builder consumer on startup"""
+    consumer_name = f"{builder_group}-proc#{builder_id}"
+
+    try:
+        # Get pending messages for this specific consumer
+        pending_info = conn.xpending_range(
+            STREAM_KEYNAME_GH_EVENTS_COMMIT,
+            builder_group,
+            min="-",
+            max="+",
+            count=1000,  # Get up to 1000 pending messages
+            consumername=consumer_name,
+        )
+
+        if pending_info:
+            message_ids = [msg["message_id"] for msg in pending_info]
+            logging.info(
+                f"Found {len(message_ids)} pending messages for builder consumer {consumer_name}. Clearing them..."
+            )
+
+            # Acknowledge all pending messages to clear them
+            ack_count = conn.xack(
+                STREAM_KEYNAME_GH_EVENTS_COMMIT, builder_group, *message_ids
+            )
+
+            logging.info(
+                f"Successfully cleared {ack_count} pending messages for builder consumer {consumer_name}"
+            )
+        else:
+            logging.info(
+                f"No pending messages found for builder consumer {consumer_name}"
+            )
+
+    except redis.exceptions.ResponseError as e:
+        if "NOGROUP" in str(e):
+            logging.info(f"Builder consumer group {builder_group} does not exist yet")
+        else:
+            logging.warning(f"Error clearing pending messages: {e}")
+    except Exception as e:
+        logging.error(f"Unexpected error clearing pending messages: {e}")
+
+
+def reset_builder_consumer_group_to_latest(conn, builder_group):
+    """Reset the builder consumer group position to only read new messages (skip old ones)"""
+    try:
+        # Set the consumer group position to '$' (latest) to skip all existing messages
+        conn.xgroup_setid(STREAM_KEYNAME_GH_EVENTS_COMMIT, builder_group, id="$")
+        logging.info(
+            f"Reset builder consumer group {builder_group} position to latest - will only process new messages"
+        )
+
+    except redis.exceptions.ResponseError as e:
+        if "NOGROUP" in str(e):
+            logging.info(f"Builder consumer group {builder_group} does not exist yet")
+        else:
+            logging.warning(f"Error resetting builder consumer group position: {e}")
+    except Exception as e:
+        logging.error(
+            f"Unexpected error resetting builder consumer group position: {e}"
+        )
+
+
 class ZipFileWithPermissions(ZipFile):
     def _extract_member(self, member, targetpath, pwd):
         if not isinstance(member, ZipInfo):
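The two new helpers follow the standard Redis Streams recovery recipe: XPENDING lists a consumer's unacknowledged entries, XACK discards them, and XGROUP SETID $ fast-forwards the group past any backlog. Below is a minimal, self-contained sketch of the same pattern with redis-py against a throwaway stream; the stream, group, and consumer names are illustrative, not the builder's real constants.

    import redis

    r = redis.Redis()  # assumes a local Redis instance

    STREAM, GROUP, CONSUMER = "demo:events", "demo-builders", "demo-builders-proc#1"

    # Seed one entry and create a group that starts from the beginning of the stream.
    r.xadd(STREAM, {"action": "build"})
    try:
        r.xgroup_create(STREAM, GROUP, id="0")
    except redis.exceptions.ResponseError:
        pass  # BUSYGROUP: the group already exists

    # Read without acknowledging, so the entry stays in the pending entries list (PEL).
    r.xreadgroup(GROUP, CONSUMER, {STREAM: ">"}, count=10)

    # Step 1 (as in clear_pending_messages_for_builder_consumer): ack what was left pending.
    pending = r.xpending_range(
        STREAM, GROUP, min="-", max="+", count=1000, consumername=CONSUMER
    )
    if pending:
        r.xack(STREAM, GROUP, *[p["message_id"] for p in pending])

    # Step 2 (as in reset_builder_consumer_group_to_latest): deliver only new entries.
    r.xgroup_setid(STREAM, GROUP, id="$")

Acknowledged entries are skipped for good, which is the point: a builder restarting after a crash should not replay stale build requests.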
@@ -104,6 +167,12 @@ def main():
     )
     parser.add_argument("--github_token", type=str, default=PERFORMANCE_GH_TOKEN)
     parser.add_argument("--pull-request", type=str, default=None, nargs="?", const="")
+    parser.add_argument(
+        "--skip-clear-pending-on-startup",
+        default=False,
+        action="store_true",
+        help="Skip automatically clearing pending messages and resetting consumer group position on startup. By default, pending messages are cleared and consumer group is reset to latest position to skip old work and recover from crashes.",
+    )
     args = parser.parse_args()
     if args.logname is not None:
         print("Writting log to {}".format(args.logname))
@@ -169,6 +238,19 @@ def main():
     builder_id = "1"

     builder_consumer_group_create(conn, builder_group)
+
+    # Clear pending messages and reset consumer group position by default (unless explicitly skipped)
+    if not args.skip_clear_pending_on_startup:
+        logging.info(
+            "Clearing pending messages and resetting builder consumer group position on startup (default behavior)"
+        )
+        clear_pending_messages_for_builder_consumer(conn, builder_group, builder_id)
+        reset_builder_consumer_group_to_latest(conn, builder_group)
+    else:
+        logging.info(
+            "Skipping pending message cleanup and builder consumer group reset as requested"
+        )
+
     if args.github_token is not None:
         logging.info("detected a github token. will update as much as possible!!! =)")
     previous_id = args.consumer_start_id
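One detail worth calling out: because the flag uses action="store_true" with default=False, the cleanup runs unless the operator opts out, and argparse exposes the dashed flag name as an underscored attribute. A self-contained check of both behaviors (standard library only):

    import argparse

    parser = argparse.ArgumentParser()
    # Dashes in the flag name become underscores in the parsed namespace.
    parser.add_argument(
        "--skip-clear-pending-on-startup", default=False, action="store_true"
    )

    assert parser.parse_args([]).skip_clear_pending_on_startup is False
    assert parser.parse_args(
        ["--skip-clear-pending-on-startup"]
    ).skip_clear_pending_on_startup is True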
@@ -268,6 +350,26 @@ def builder_process_stream(
                         build_request_arch, arch
                     )
                 )
+            # Acknowledge the message even though we're skipping it
+            ack_reply = conn.xack(
+                STREAM_KEYNAME_GH_EVENTS_COMMIT,
+                STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
+                streamId,
+            )
+            if type(ack_reply) == bytes:
+                ack_reply = ack_reply.decode()
+            if ack_reply == "1" or ack_reply == 1:
+                logging.info(
+                    "Successfully acknowledged build variation stream with id {} (filtered by arch).".format(
+                        streamId
+                    )
+                )
+            else:
+                logging.error(
+                    "Unable to acknowledge build variation stream with id {}. XACK reply {}".format(
+                        streamId, ack_reply
+                    )
+                )
             return previous_id, new_builds_count, build_stream_fields_arr

     home = str(Path.home())
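A note on the reply handling: with redis-py, XACK returns the number of acknowledged entries as a plain int, so the bytes/str branches above are defensive coding for other client behaviors. A stricter normalization could look like this (a sketch, not the package's code):

    def ack_succeeded(reply) -> bool:
        # redis-py returns an int count of acked entries; raw RESP transports
        # or other clients may surface bytes or str instead.
        if isinstance(reply, bytes):
            reply = reply.decode()
        return int(reply) == 1  # exactly one entry was acknowledged

    assert ack_succeeded(1)
    assert ack_succeeded(b"1")
    assert not ack_succeeded(0)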
@@ -429,6 +531,79 @@ def builder_process_stream(
             if b"server_name" in testDetails:
                 server_name = testDetails[b"server_name"].decode()

+            # Check if artifacts already exist before building
+            prefix = f"github_org={github_org}/github_repo={github_repo}/git_branch={str(git_branch)}/git_version={str(git_version)}/git_hash={str(git_hash)}"
+
+            # Create a comprehensive build signature that includes all build-affecting parameters
+            import hashlib
+
+            build_signature_parts = [
+                str(id),  # build config ID
+                str(build_command),  # build command
+                str(build_vars_str),  # environment variables
+                str(compiler),  # compiler
+                str(cpp_compiler),  # C++ compiler
+                str(build_image),  # build image
+                str(build_os),  # OS
+                str(build_arch),  # architecture
+                ",".join(sorted(build_artifacts)),  # artifacts list
+            ]
+            build_signature = hashlib.sha256(
+                ":".join(build_signature_parts).encode()
+            ).hexdigest()[:16]
+
+            # Check if all artifacts already exist
+            all_artifacts_exist = True
+            artifact_keys = {}
+            for artifact in build_artifacts:
+                bin_key = f"zipped:artifacts:{prefix}:{id}:{build_signature}:{artifact}.zip"
+                artifact_keys[artifact] = bin_key
+                if not conn.exists(bin_key):
+                    all_artifacts_exist = False
+                    break
+
+            if all_artifacts_exist:
+                logging.info(
+                    f"Artifacts for {git_hash}:{id} with build signature {build_signature} already exist, reusing them"
+                )
+                # Skip build and reuse existing artifacts
+                build_stream_fields, result = generate_benchmark_stream_request(
+                    id,
+                    conn,
+                    run_image,
+                    build_arch,
+                    testDetails,
+                    build_os,
+                    build_artifacts,
+                    build_command,
+                    build_config_metadata,
+                    build_image,
+                    build_vars_str,
+                    compiler,
+                    cpp_compiler,
+                    git_branch,
+                    git_hash,
+                    git_timestamp_ms,
+                    git_version,
+                    pull_request,
+                    None,  # redis_temporary_dir not needed for reuse
+                    tests_groups_regexp,
+                    tests_priority_lower_limit,
+                    tests_priority_upper_limit,
+                    tests_regexp,
+                    ".*",  # command_regexp - default to all commands
+                    use_git_timestamp,
+                    server_name,
+                    github_org,
+                    github_repo,
+                    artifact_keys,  # Pass existing artifact keys
+                )
+                continue  # Skip to next build spec
+
+            logging.info(
+                f"Building artifacts for {git_hash}:{id} with build signature {build_signature}"
+            )
+
             build_start_datetime = datetime.datetime.utcnow()
             logging.info(
                 "Using the following build command {}.".format(build_command)
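The cache key is a SHA-256 over every build-affecting parameter, ':'-joined and truncated to 16 hex characters. A worked example with made-up values (the parameter roles mirror the diff; the concrete strings are illustrative only):

    import hashlib

    build_signature_parts = [
        "gcc:8.5.0-amd64-debian-buster-default",  # build config ID (made up)
        "make -j",                                # build command
        "None",                                   # environment variables
        "gcc",                                    # compiler
        "g++",                                    # C++ compiler
        "gcc:8.5.0",                              # build image
        "debian-buster",                          # OS
        "amd64",                                  # architecture
        ",".join(sorted(["redis-benchmark", "redis-cli", "redis-server"])),
    ]
    build_signature = hashlib.sha256(
        ":".join(build_signature_parts).encode()
    ).hexdigest()[:16]
    print(build_signature)  # deterministic 16-hex-char component of the artifact key

Sixteen hex characters give 64 bits of signature: short enough for readable keys while making accidental collisions between distinct build configurations unlikely. Any parameter change yields a new key, so a cache hit really does mean the same build.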
@@ -507,6 +682,7 @@ def builder_process_stream(
                     server_name,
                     github_org,
                     github_repo,
+                    None,  # existing_artifact_keys - None for new builds
                 )
             if result is True:
                 benchmark_stream_id = conn.xadd(
@@ -648,6 +824,7 @@ def generate_benchmark_stream_request(
     server_name="redis",
     github_org="redis",
     github_repo="redis",
+    existing_artifact_keys=None,
 ):
     build_stream_fields = {
         "id": id,
@@ -691,21 +868,50 @@ def generate_benchmark_stream_request(
     if git_timestamp_ms is not None:
         build_stream_fields["git_timestamp_ms"] = git_timestamp_ms

-    [15 lines removed here: the previous unconditional artifact-packaging block; its content was not captured in this diff view]
+    if existing_artifact_keys is not None:
+        # Use existing artifact keys (for reuse case)
+        for artifact in build_artifacts:
+            bin_key = existing_artifact_keys[artifact]
+            build_stream_fields[artifact] = bin_key
+            # Get the length from the existing artifact
+            bin_artifact_len = conn.strlen(bin_key)
+            build_stream_fields["{}_len_bytes".format(artifact)] = bin_artifact_len
+    else:
+        # Build new artifacts and store them
+        prefix = f"github_org={github_org}/github_repo={github_repo}/git_branch={str(git_branch)}/git_version={str(git_version)}/git_hash={str(git_hash)}"
+
+        # Create build signature for new artifacts
+        import hashlib
+
+        build_signature_parts = [
+            str(id),  # build config ID
+            str(build_command),  # build command
+            str(build_vars_str),  # environment variables
+            str(compiler),  # compiler
+            str(cpp_compiler),  # C++ compiler
+            str(build_image),  # build image
+            str(build_os),  # OS
+            str(build_arch),  # architecture
+            ",".join(sorted(build_artifacts)),  # artifacts list
+        ]
+        build_signature = hashlib.sha256(
+            ":".join(build_signature_parts).encode()
+        ).hexdigest()[:16]
+
+        for artifact in build_artifacts:
+            bin_key = f"zipped:artifacts:{prefix}:{id}:{build_signature}:{artifact}.zip"
+            if artifact == "redisearch.so":
+                bin_artifact = open(
+                    f"{redis_temporary_dir}modules/redisearch/src/bin/linux-x64-release/search-community/{artifact}",
+                    "rb",
+                ).read()
+            else:
+                bin_artifact = open(f"{redis_temporary_dir}src/{artifact}", "rb").read()
+            bin_artifact_len = len(bytes(bin_artifact))
+            assert bin_artifact_len > 0
+            conn.set(bin_key, bytes(bin_artifact), ex=REDIS_BINS_EXPIRE_SECS)
+            build_stream_fields[artifact] = bin_key
+            build_stream_fields["{}_len_bytes".format(artifact)] = bin_artifact_len
     result = True
     if b"platform" in testDetails:
         build_stream_fields["platform"] = testDetails[b"platform"]
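On the reuse branch the builder never touches the filesystem: it points the benchmark request at the existing keys and recovers each artifact's size with STRLEN instead of re-reading the binary. A minimal sketch of the store-once/reuse-later round trip, assuming redis-py (the key name and TTL are illustrative, not the package's constants):

    import redis

    r = redis.Redis()
    EXPIRE_SECS = 60 * 60 * 24  # illustrative TTL

    bin_key = "zipped:artifacts:demo:abc123:redis-server.zip"
    payload = b"\x00" * 1024  # stand-in for a zipped binary

    # Store once, with an expiry so stale artifacts age out of Redis.
    r.set(bin_key, payload, ex=EXPIRE_SECS)

    # Reuse path: existence check, then size via STRLEN without fetching the value.
    if r.exists(bin_key):
        assert r.strlen(bin_key) == len(payload)

Because the build signature feeds into the key name, a change to any build parameter produces a different key, so a stale artifact is never mistaken for the new configuration.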
redis_benchmarks_specification/__cli__/cli.py

@@ -104,6 +104,12 @@ def trigger_tests_dockerhub_cli_command_logic(args, project_name, project_versio
         0,
         10000,
         args.tests_regexp,
+        ".*",  # command_regexp
+        False,  # use_git_timestamp
+        "redis",  # server_name
+        "redis",  # github_org
+        "redis",  # github_repo
+        None,  # existing_artifact_keys
     )
     build_stream_fields["github_repo"] = args.gh_repo
     build_stream_fields["github_org"] = args.gh_org
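Because this call site passes arguments positionally, appending existing_artifact_keys at the end forces it to restate every intermediate default (command_regexp through github_repo). A toy illustration of the trade-off; this is not the package's function, just a sketch of the calling convention:

    def make_request(tests_regexp=".*", server_name="redis", existing_artifact_keys=None):
        return (tests_regexp, server_name, existing_artifact_keys)

    # Positional style (as in the diff) must spell out everything up to the new argument:
    positional = make_request(".*", "redis", None)
    # Keyword style can skip defaults that the call does not change:
    keyword = make_request(existing_artifact_keys=None)
    assert positional == keyword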
redis_benchmarks_specification/__self_contained_coordinator__/runners.py

@@ -149,11 +149,7 @@ def reset_consumer_group_to_latest(conn, running_platform):

     try:
         # Set the consumer group position to '$' (latest) to skip all existing messages
-        conn.xgroup_setid(
-            STREAM_KEYNAME_NEW_BUILD_EVENTS,
-            consumer_group_name,
-            id="$"
-        )
+        conn.xgroup_setid(STREAM_KEYNAME_NEW_BUILD_EVENTS, consumer_group_name, id="$")
         logging.info(
             f"Reset consumer group {consumer_group_name} position to latest - will only process new messages"
         )
redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py

@@ -655,13 +655,17 @@ def main():
     # Clear pending messages and reset consumer group position by default (unless explicitly skipped)
     if not args.skip_clear_pending_on_startup:
         consumer_pos = args.consumer_pos
-        logging.info(
+        logging.info(
+            "Clearing pending messages and resetting consumer group position on startup (default behavior)"
+        )
         clear_pending_messages_for_consumer(
             gh_event_conn, running_platform, consumer_pos
         )
         reset_consumer_group_to_latest(gh_event_conn, running_platform)
     else:
-        logging.info(
+        logging.info(
+            "Skipping pending message cleanup and consumer group reset as requested"
+        )

     stream_id = None
     docker_client = docker.from_env()
@@ -868,26 +872,34 @@ def self_contained_coordinator_blocking_read(
             )
             num_process_streams = num_process_streams + 1
             num_process_test_suites = num_process_test_suites + total_test_suite_runs
-            [9 lines removed here: the previous acknowledgment logic; its content was not captured in this diff view]
+
+            # Always acknowledge the message, even if it was filtered out
+            ack_reply = github_event_conn.xack(
+                STREAM_KEYNAME_NEW_BUILD_EVENTS,
+                get_runners_consumer_group_name(platform_name),
+                stream_id,
+            )
+            if type(ack_reply) == bytes:
+                ack_reply = ack_reply.decode()
+            if ack_reply == "1" or ack_reply == 1:
+                if overall_result is True:
                     logging.info(
-                        "
+                        "Successfully acknowledged BENCHMARK variation stream with id {} (processed).".format(
                             stream_id
                         )
                     )
                 else:
-                    logging.
-                        "
-                            stream_id
+                    logging.info(
+                        "Successfully acknowledged BENCHMARK variation stream with id {} (filtered/skipped).".format(
+                            stream_id
                         )
                     )
+            else:
+                logging.error(
+                    "Unable to acknowledge build variation stream with id {}. XACK reply {}".format(
+                        stream_id, ack_reply
+                    )
+                )
             return overall_result, stream_id, num_process_streams, num_process_test_suites

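The coordinator change mirrors the builder's: every delivered entry is acknowledged, whether it produced test runs or was filtered out, so nothing lingers in the pending entries list between restarts. A minimal consume-then-always-ack loop with redis-py, using illustrative names:

    import redis

    r = redis.Redis()
    STREAM, GROUP, CONSUMER = "demo:builds", "demo-runners", "runner-1"

    try:
        r.xgroup_create(STREAM, GROUP, id="$", mkstream=True)
    except redis.exceptions.ResponseError:
        pass  # group already exists

    entries = r.xreadgroup(GROUP, CONSUMER, {STREAM: ">"}, count=10, block=1000)
    for _stream, messages in entries or []:
        for stream_id, fields in messages:
            try:
                if fields.get(b"arch") == b"amd64":  # illustrative filter
                    pass  # process the build event here
            finally:
                # Ack unconditionally so filtered entries never stick in the PEL.
                r.xack(STREAM, GROUP, stream_id)

The trade-off is deliberate: an entry acknowledged before its work fully succeeds will not be redelivered, which matches the startup policy of preferring fresh events over replaying old ones.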
{redis_benchmarks_specification-0.1.338.dist-info → redis_benchmarks_specification-0.1.340.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: redis-benchmarks-specification
-Version: 0.1.338
+Version: 0.1.340
 Summary: The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute.
 Author: filipecosta90
 Author-email: filipecosta.90@gmail.com
{redis_benchmarks_specification-0.1.338.dist-info → redis_benchmarks_specification-0.1.340.dist-info}/RECORD

@@ -4,11 +4,11 @@ redis_benchmarks_specification/__api__/api.py,sha256=k_CMICtMm1z8jY3hByaL0hIr_5v
 redis_benchmarks_specification/__api__/app.py,sha256=JzQm84DjIVdfLbDO423BJbrds6gFzMbA0syRkHE_aUU,7063
 redis_benchmarks_specification/__builder__/Readme.md,sha256=O6MV_J3OSgzW-ir2TbukP8Vhkm_LOzQJJndG1Cykqic,111
 redis_benchmarks_specification/__builder__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
-redis_benchmarks_specification/__builder__/builder.py,sha256=
+redis_benchmarks_specification/__builder__/builder.py,sha256=5GhZ76itxcXfaHr7YtNrJp2zcYC_XHskpqZjIbMh3UI,38862
 redis_benchmarks_specification/__builder__/schema.py,sha256=1wcmyVJBcWrBvK58pghN9NCoWLCO3BzPsmdKWYfkVog,584
 redis_benchmarks_specification/__cli__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
 redis_benchmarks_specification/__cli__/args.py,sha256=X7VlHJvX3n85ZPUQFoovmaFDnY4t7irUrDLf07QAfaA,7430
-redis_benchmarks_specification/__cli__/cli.py,sha256=
+redis_benchmarks_specification/__cli__/cli.py,sha256=GMd_Swn6HA8JAFd7hokLsLO3_F2qnd_2eavVQ66M1lk,22204
 redis_benchmarks_specification/__cli__/stats.py,sha256=r9JIfwGCSR3maozYbDZfZrkthNFQSs0xIymS86yZ6Iw,55574
 redis_benchmarks_specification/__common__/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 redis_benchmarks_specification/__common__/builder_schema.py,sha256=kfDpRIk7NkJrb5qj9jzsBhLVNO7K_W2Clumj4pxrkG8,5938
@@ -36,8 +36,8 @@ redis_benchmarks_specification/__self_contained_coordinator__/cpuset.py,sha256=s
 redis_benchmarks_specification/__self_contained_coordinator__/docker.py,sha256=09SyAfqlzs1KG9ZAajClNWtiNk4Jqzd--4-m3n1rLjU,3156
 redis_benchmarks_specification/__self_contained_coordinator__/post_processing.py,sha256=sVLKNnWdAqYY9DjVdqRC5tDaIrVSaI3Ca7w8-DQ-LRM,776
 redis_benchmarks_specification/__self_contained_coordinator__/prepopulation.py,sha256=1UeFr2T1ZQBcHCSd4W1ZtaWgXyFPfjLyDi_DgDc1eTA,2957
-redis_benchmarks_specification/__self_contained_coordinator__/runners.py,sha256=
-redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=
+redis_benchmarks_specification/__self_contained_coordinator__/runners.py,sha256=1bpGiybmeQrdHh-z-fAyMvzOggZk4_MNHMTWrAfGwQU,32826
+redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=vtBJzzH6eq2v7M7EK1Ve5EZyUfpgSFKTHQfIUY8agFE,113065
 redis_benchmarks_specification/__setups__/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 redis_benchmarks_specification/__setups__/topologies.py,sha256=xQ1IJkcTji_ZjLiJd3vOxZpvbNtBLZw9cPkw5hGJKHU,481
 redis_benchmarks_specification/__spec__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
@@ -282,8 +282,8 @@ redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-st
 redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-storage-1k-sessions.yml,sha256=2egtIxPxCze2jlbAfgsk4v9JSQHNMoPLbDWFEW8olDg,7006
 redis_benchmarks_specification/test-suites/template.txt,sha256=ezqGiRPOvuSDO0iG7GEf-AGXNfHbgXI89_G0RUEzL88,481
 redis_benchmarks_specification/vector-search-test-suites/vector_db_benchmark_test.yml,sha256=PD7ow-k4Ll2BkhEC3aIqiaCZt8Hc4aJIp96Lw3J3mcI,791
-redis_benchmarks_specification-0.1.338.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-redis_benchmarks_specification-0.1.338.dist-info/METADATA,sha256=
-redis_benchmarks_specification-0.1.338.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-redis_benchmarks_specification-0.1.338.dist-info/entry_points.txt,sha256=x5WBXCZsnDRTZxV7SBGmC65L2k-ygdDOxV8vuKN00Nk,715
-redis_benchmarks_specification-0.1.338.dist-info/RECORD,,
+redis_benchmarks_specification-0.1.340.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+redis_benchmarks_specification-0.1.340.dist-info/METADATA,sha256=DDrmuBuSVQ7qSeKz2z-mgLP8oNV0DNDuAfO3a_oxZ0I,22768
+redis_benchmarks_specification-0.1.340.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+redis_benchmarks_specification-0.1.340.dist-info/entry_points.txt,sha256=x5WBXCZsnDRTZxV7SBGmC65L2k-ygdDOxV8vuKN00Nk,715
+redis_benchmarks_specification-0.1.340.dist-info/RECORD,,