redis-benchmarks-specification 0.1.337__py3-none-any.whl → 0.1.339__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of redis-benchmarks-specification might be problematic. See the registry's advisory page for this release for more details.

@@ -429,6 +429,79 @@ def builder_process_stream(
429
429
  if b"server_name" in testDetails:
430
430
  server_name = testDetails[b"server_name"].decode()
431
431
 
432
+ # Check if artifacts already exist before building
433
+ prefix = f"github_org={github_org}/github_repo={github_repo}/git_branch={str(git_branch)}/git_version={str(git_version)}/git_hash={str(git_hash)}"
434
+
435
+ # Create a comprehensive build signature that includes all build-affecting parameters
436
+ import hashlib
437
+
438
+ build_signature_parts = [
439
+ str(id), # build config ID
440
+ str(build_command), # build command
441
+ str(build_vars_str), # environment variables
442
+ str(compiler), # compiler
443
+ str(cpp_compiler), # C++ compiler
444
+ str(build_image), # build image
445
+ str(build_os), # OS
446
+ str(build_arch), # architecture
447
+ ",".join(sorted(build_artifacts)), # artifacts list
448
+ ]
449
+ build_signature = hashlib.sha256(
450
+ ":".join(build_signature_parts).encode()
451
+ ).hexdigest()[:16]
452
+
453
+ # Check if all artifacts already exist
454
+ all_artifacts_exist = True
455
+ artifact_keys = {}
456
+ for artifact in build_artifacts:
457
+ bin_key = f"zipped:artifacts:{prefix}:{id}:{build_signature}:{artifact}.zip"
458
+ artifact_keys[artifact] = bin_key
459
+ if not conn.exists(bin_key):
460
+ all_artifacts_exist = False
461
+ break
462
+
463
+ if all_artifacts_exist:
464
+ logging.info(
465
+ f"Artifacts for {git_hash}:{id} with build signature {build_signature} already exist, reusing them"
466
+ )
467
+ # Skip build and reuse existing artifacts
468
+ build_stream_fields, result = generate_benchmark_stream_request(
469
+ id,
470
+ conn,
471
+ run_image,
472
+ build_arch,
473
+ testDetails,
474
+ build_os,
475
+ build_artifacts,
476
+ build_command,
477
+ build_config_metadata,
478
+ build_image,
479
+ build_vars_str,
480
+ compiler,
481
+ cpp_compiler,
482
+ git_branch,
483
+ git_hash,
484
+ git_timestamp_ms,
485
+ git_version,
486
+ pull_request,
487
+ None, # redis_temporary_dir not needed for reuse
488
+ tests_groups_regexp,
489
+ tests_priority_lower_limit,
490
+ tests_priority_upper_limit,
491
+ tests_regexp,
492
+ ".*", # command_regexp - default to all commands
493
+ use_git_timestamp,
494
+ server_name,
495
+ github_org,
496
+ github_repo,
497
+ artifact_keys, # Pass existing artifact keys
498
+ )
499
+ continue # Skip to next build spec
500
+
501
+ logging.info(
502
+ f"Building artifacts for {git_hash}:{id} with build signature {build_signature}"
503
+ )
504
+
432
505
  build_start_datetime = datetime.datetime.utcnow()
433
506
  logging.info(
434
507
  "Using the following build command {}.".format(build_command)
@@ -507,6 +580,7 @@ def builder_process_stream(
507
580
  server_name,
508
581
  github_org,
509
582
  github_repo,
583
+ None, # existing_artifact_keys - None for new builds
510
584
  )
511
585
  if result is True:
512
586
  benchmark_stream_id = conn.xadd(
@@ -648,6 +722,7 @@ def generate_benchmark_stream_request(
648
722
  server_name="redis",
649
723
  github_org="redis",
650
724
  github_repo="redis",
725
+ existing_artifact_keys=None,
651
726
  ):
652
727
  build_stream_fields = {
653
728
  "id": id,
@@ -691,21 +766,50 @@ def generate_benchmark_stream_request(
691
766
  if git_timestamp_ms is not None:
692
767
  build_stream_fields["git_timestamp_ms"] = git_timestamp_ms
693
768
 
694
- prefix = f"github_org={github_org}/github_repo={github_repo}/git_branch={str(git_branch)}/git_version={str(git_version)}/git_hash={str(git_hash)}"
695
- for artifact in build_artifacts:
696
- bin_key = f"zipped:artifacts:{prefix}:{id}:{artifact}.zip"
697
- if artifact == "redisearch.so":
698
- bin_artifact = open(
699
- f"{redis_temporary_dir}modules/redisearch/src/bin/linux-x64-release/search-community/{artifact}",
700
- "rb",
701
- ).read()
702
- else:
703
- bin_artifact = open(f"{redis_temporary_dir}src/{artifact}", "rb").read()
704
- bin_artifact_len = len(bytes(bin_artifact))
705
- assert bin_artifact_len > 0
706
- conn.set(bin_key, bytes(bin_artifact), ex=REDIS_BINS_EXPIRE_SECS)
707
- build_stream_fields[artifact] = bin_key
708
- build_stream_fields["{}_len_bytes".format(artifact)] = bin_artifact_len
769
+ if existing_artifact_keys is not None:
770
+ # Use existing artifact keys (for reuse case)
771
+ for artifact in build_artifacts:
772
+ bin_key = existing_artifact_keys[artifact]
773
+ build_stream_fields[artifact] = bin_key
774
+ # Get the length from the existing artifact
775
+ bin_artifact_len = conn.strlen(bin_key)
776
+ build_stream_fields["{}_len_bytes".format(artifact)] = bin_artifact_len
777
+ else:
778
+ # Build new artifacts and store them
779
+ prefix = f"github_org={github_org}/github_repo={github_repo}/git_branch={str(git_branch)}/git_version={str(git_version)}/git_hash={str(git_hash)}"
780
+
781
+ # Create build signature for new artifacts
782
+ import hashlib
783
+
784
+ build_signature_parts = [
785
+ str(id), # build config ID
786
+ str(build_command), # build command
787
+ str(build_vars_str), # environment variables
788
+ str(compiler), # compiler
789
+ str(cpp_compiler), # C++ compiler
790
+ str(build_image), # build image
791
+ str(build_os), # OS
792
+ str(build_arch), # architecture
793
+ ",".join(sorted(build_artifacts)), # artifacts list
794
+ ]
795
+ build_signature = hashlib.sha256(
796
+ ":".join(build_signature_parts).encode()
797
+ ).hexdigest()[:16]
798
+
799
+ for artifact in build_artifacts:
800
+ bin_key = f"zipped:artifacts:{prefix}:{id}:{build_signature}:{artifact}.zip"
801
+ if artifact == "redisearch.so":
802
+ bin_artifact = open(
803
+ f"{redis_temporary_dir}modules/redisearch/src/bin/linux-x64-release/search-community/{artifact}",
804
+ "rb",
805
+ ).read()
806
+ else:
807
+ bin_artifact = open(f"{redis_temporary_dir}src/{artifact}", "rb").read()
808
+ bin_artifact_len = len(bytes(bin_artifact))
809
+ assert bin_artifact_len > 0
810
+ conn.set(bin_key, bytes(bin_artifact), ex=REDIS_BINS_EXPIRE_SECS)
811
+ build_stream_fields[artifact] = bin_key
812
+ build_stream_fields["{}_len_bytes".format(artifact)] = bin_artifact_len
709
813
  result = True
710
814
  if b"platform" in testDetails:
711
815
  build_stream_fields["platform"] = testDetails[b"platform"]
@@ -104,6 +104,12 @@ def trigger_tests_dockerhub_cli_command_logic(args, project_name, project_versio
104
104
  0,
105
105
  10000,
106
106
  args.tests_regexp,
107
+ ".*", # command_regexp
108
+ False, # use_git_timestamp
109
+ "redis", # server_name
110
+ "redis", # github_org
111
+ "redis", # github_repo
112
+ None, # existing_artifact_keys
107
113
  )
108
114
  build_stream_fields["github_repo"] = args.gh_repo
109
115
  build_stream_fields["github_org"] = args.gh_org
@@ -1934,7 +1934,7 @@ def process_self_contained_coordinator_stream(
1934
1934
  benchmark_tool_global=benchmark_tool_global,
1935
1935
  )
1936
1936
  continue
1937
- logging.info(
1937
+ logging.debug(
1938
1938
  "Test {} priority ({}) is within the priority limit [{},{}]".format(
1939
1939
  test_name,
1940
1940
  priority,
@@ -205,6 +205,6 @@ def create_self_contained_coordinator_args(project_name):
205
205
  "--skip-clear-pending-on-startup",
206
206
  default=False,
207
207
  action="store_true",
208
- help="Skip automatically clearing pending messages for this consumer on startup. By default, pending messages are cleared to recover from crashes.",
208
+ help="Skip automatically clearing pending messages and resetting consumer group position on startup. By default, pending messages are cleared and consumer group is reset to latest position to skip old work and recover from crashes.",
209
209
  )
210
210
  return parser
@@ -143,6 +143,26 @@ def clear_pending_messages_for_consumer(conn, running_platform, consumer_pos):
143
143
  logging.error(f"Unexpected error clearing pending messages: {e}")
144
144
 
145
145
 
146
+ def reset_consumer_group_to_latest(conn, running_platform):
147
+ """Reset the consumer group position to only read new messages (skip old ones)"""
148
+ consumer_group_name = get_runners_consumer_group_name(running_platform)
149
+
150
+ try:
151
+ # Set the consumer group position to '$' (latest) to skip all existing messages
152
+ conn.xgroup_setid(STREAM_KEYNAME_NEW_BUILD_EVENTS, consumer_group_name, id="$")
153
+ logging.info(
154
+ f"Reset consumer group {consumer_group_name} position to latest - will only process new messages"
155
+ )
156
+
157
+ except redis.exceptions.ResponseError as e:
158
+ if "NOGROUP" in str(e):
159
+ logging.info(f"Consumer group {consumer_group_name} does not exist yet")
160
+ else:
161
+ logging.warning(f"Error resetting consumer group position: {e}")
162
+ except Exception as e:
163
+ logging.error(f"Unexpected error resetting consumer group position: {e}")
164
+
165
+
146
166
  def process_self_contained_coordinator_stream(
147
167
  conn,
148
168
  datasink_push_results_redistimeseries,
@@ -76,6 +76,7 @@ from redis_benchmarks_specification.__self_contained_coordinator__.runners impor
76
76
  build_runners_consumer_group_create,
77
77
  get_runners_consumer_group_name,
78
78
  clear_pending_messages_for_consumer,
79
+ reset_consumer_group_to_latest,
79
80
  )
80
81
  from redis_benchmarks_specification.__setups__.topologies import get_topologies
81
82
 
@@ -651,15 +652,20 @@ def main():
651
652
  running_platform = args.platform_name
652
653
  build_runners_consumer_group_create(gh_event_conn, running_platform)
653
654
 
654
- # Clear pending messages by default (unless explicitly skipped)
655
+ # Clear pending messages and reset consumer group position by default (unless explicitly skipped)
655
656
  if not args.skip_clear_pending_on_startup:
656
657
  consumer_pos = args.consumer_pos
657
- logging.info("Clearing pending messages on startup (default behavior)")
658
+ logging.info(
659
+ "Clearing pending messages and resetting consumer group position on startup (default behavior)"
660
+ )
658
661
  clear_pending_messages_for_consumer(
659
662
  gh_event_conn, running_platform, consumer_pos
660
663
  )
664
+ reset_consumer_group_to_latest(gh_event_conn, running_platform)
661
665
  else:
662
- logging.info("Skipping pending message cleanup as requested")
666
+ logging.info(
667
+ "Skipping pending message cleanup and consumer group reset as requested"
668
+ )
663
669
 
664
670
  stream_id = None
665
671
  docker_client = docker.from_env()
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: redis-benchmarks-specification
3
- Version: 0.1.337
3
+ Version: 0.1.339
4
4
  Summary: The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute.
5
5
  Author: filipecosta90
6
6
  Author-email: filipecosta.90@gmail.com
@@ -4,11 +4,11 @@ redis_benchmarks_specification/__api__/api.py,sha256=k_CMICtMm1z8jY3hByaL0hIr_5v
4
4
  redis_benchmarks_specification/__api__/app.py,sha256=JzQm84DjIVdfLbDO423BJbrds6gFzMbA0syRkHE_aUU,7063
5
5
  redis_benchmarks_specification/__builder__/Readme.md,sha256=O6MV_J3OSgzW-ir2TbukP8Vhkm_LOzQJJndG1Cykqic,111
6
6
  redis_benchmarks_specification/__builder__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
7
- redis_benchmarks_specification/__builder__/builder.py,sha256=lAoEQ8ab9AWstYcpF2hoixZ_HFmMKf9Icwzc0WV0t_I,29867
7
+ redis_benchmarks_specification/__builder__/builder.py,sha256=NiigX_UPeCulCBfrgZvntLIFRfgHe-j43CqWKSlvguk,34555
8
8
  redis_benchmarks_specification/__builder__/schema.py,sha256=1wcmyVJBcWrBvK58pghN9NCoWLCO3BzPsmdKWYfkVog,584
9
9
  redis_benchmarks_specification/__cli__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
10
10
  redis_benchmarks_specification/__cli__/args.py,sha256=X7VlHJvX3n85ZPUQFoovmaFDnY4t7irUrDLf07QAfaA,7430
11
- redis_benchmarks_specification/__cli__/cli.py,sha256=iTjINQ-RV_q2ovq1neSoRCAggpGdeP5mX3_1aFxSScY,22001
11
+ redis_benchmarks_specification/__cli__/cli.py,sha256=GMd_Swn6HA8JAFd7hokLsLO3_F2qnd_2eavVQ66M1lk,22204
12
12
  redis_benchmarks_specification/__cli__/stats.py,sha256=r9JIfwGCSR3maozYbDZfZrkthNFQSs0xIymS86yZ6Iw,55574
13
13
  redis_benchmarks_specification/__common__/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
14
14
  redis_benchmarks_specification/__common__/builder_schema.py,sha256=kfDpRIk7NkJrb5qj9jzsBhLVNO7K_W2Clumj4pxrkG8,5938
@@ -26,9 +26,9 @@ redis_benchmarks_specification/__init__.py,sha256=YQIEx2sLPPA0JR9OuCuMNMNtm-f_gq
26
26
  redis_benchmarks_specification/__runner__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
27
27
  redis_benchmarks_specification/__runner__/args.py,sha256=K3VGmBC0-9lSv9H6VDp0N-6FGMWvc_4H0pG_TOXN5u8,11312
28
28
  redis_benchmarks_specification/__runner__/remote_profiling.py,sha256=R7obNQju8mmY9oKkcndjI4aAuxi84OCLhDSqqaYu1SU,18610
29
- redis_benchmarks_specification/__runner__/runner.py,sha256=JW2fB0C6Ce4d6VVQK50qNqpSNGEjq6QVjowUMUA0gzs,156345
29
+ redis_benchmarks_specification/__runner__/runner.py,sha256=xuPc2ht3sPlnqZblANgECtqHX_BgQNodDMKvdurDuTk,156346
30
30
  redis_benchmarks_specification/__self_contained_coordinator__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
31
- redis_benchmarks_specification/__self_contained_coordinator__/args.py,sha256=Rkajbvb-R4aEJd01gHNbAWrKuiqycHNfKVdO28nDEjI,7244
31
+ redis_benchmarks_specification/__self_contained_coordinator__/args.py,sha256=2nTD4g4V1NjMRjRuDvHaoub5sjcav0GCnxv2HFiXWKc,7329
32
32
  redis_benchmarks_specification/__self_contained_coordinator__/artifacts.py,sha256=OVHqJzDgeSSRfUSiKp1ZTAVv14PvSbk-5yJsAAoUfpw,936
33
33
  redis_benchmarks_specification/__self_contained_coordinator__/build_info.py,sha256=vlg8H8Rxu2falW8xp1GvL1SV1fyBguSbz6Apxc7A2yM,2282
34
34
  redis_benchmarks_specification/__self_contained_coordinator__/clients.py,sha256=EL1V4-i-tTav1mcF_CUosqPF3Q1qi9BZL0zFajEk70c,1878
@@ -36,8 +36,8 @@ redis_benchmarks_specification/__self_contained_coordinator__/cpuset.py,sha256=s
36
36
  redis_benchmarks_specification/__self_contained_coordinator__/docker.py,sha256=09SyAfqlzs1KG9ZAajClNWtiNk4Jqzd--4-m3n1rLjU,3156
37
37
  redis_benchmarks_specification/__self_contained_coordinator__/post_processing.py,sha256=sVLKNnWdAqYY9DjVdqRC5tDaIrVSaI3Ca7w8-DQ-LRM,776
38
38
  redis_benchmarks_specification/__self_contained_coordinator__/prepopulation.py,sha256=1UeFr2T1ZQBcHCSd4W1ZtaWgXyFPfjLyDi_DgDc1eTA,2957
39
- redis_benchmarks_specification/__self_contained_coordinator__/runners.py,sha256=agom6H0iDUH_oQkObS8EtoAm0JUpTVeiBv-EMEnEMtY,31908
40
- redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=l7OvqbF86l7YhwQQ80En5uxECxytvqlW7NeW43q6YUM,112547
39
+ redis_benchmarks_specification/__self_contained_coordinator__/runners.py,sha256=1bpGiybmeQrdHh-z-fAyMvzOggZk4_MNHMTWrAfGwQU,32826
40
+ redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=BhOSyinsF8AkaPtR5jjB8vj3VTC0UwyBtCY7iGkWw_I,112796
41
41
  redis_benchmarks_specification/__setups__/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
42
42
  redis_benchmarks_specification/__setups__/topologies.py,sha256=xQ1IJkcTji_ZjLiJd3vOxZpvbNtBLZw9cPkw5hGJKHU,481
43
43
  redis_benchmarks_specification/__spec__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
@@ -282,8 +282,8 @@ redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-st
282
282
  redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-storage-1k-sessions.yml,sha256=2egtIxPxCze2jlbAfgsk4v9JSQHNMoPLbDWFEW8olDg,7006
283
283
  redis_benchmarks_specification/test-suites/template.txt,sha256=ezqGiRPOvuSDO0iG7GEf-AGXNfHbgXI89_G0RUEzL88,481
284
284
  redis_benchmarks_specification/vector-search-test-suites/vector_db_benchmark_test.yml,sha256=PD7ow-k4Ll2BkhEC3aIqiaCZt8Hc4aJIp96Lw3J3mcI,791
285
- redis_benchmarks_specification-0.1.337.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
286
- redis_benchmarks_specification-0.1.337.dist-info/METADATA,sha256=MwhGo3EAg1Au3m-RJ41H-Bi400Fj4d4xjGqcRxgGNLQ,22768
287
- redis_benchmarks_specification-0.1.337.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
288
- redis_benchmarks_specification-0.1.337.dist-info/entry_points.txt,sha256=x5WBXCZsnDRTZxV7SBGmC65L2k-ygdDOxV8vuKN00Nk,715
289
- redis_benchmarks_specification-0.1.337.dist-info/RECORD,,
285
+ redis_benchmarks_specification-0.1.339.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
286
+ redis_benchmarks_specification-0.1.339.dist-info/METADATA,sha256=xFGR8MNuEXNuxwQJKb2SRDKcXKkWeN64LPxQpD4RG5o,22768
287
+ redis_benchmarks_specification-0.1.339.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
288
+ redis_benchmarks_specification-0.1.339.dist-info/entry_points.txt,sha256=x5WBXCZsnDRTZxV7SBGmC65L2k-ygdDOxV8vuKN00Nk,715
289
+ redis_benchmarks_specification-0.1.339.dist-info/RECORD,,