redis-benchmarks-specification 0.1.338__py3-none-any.whl → 0.1.339__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of redis-benchmarks-specification might be problematic. See the package registry's advisory page for more details.

@@ -429,6 +429,79 @@ def builder_process_stream(
429
429
  if b"server_name" in testDetails:
430
430
  server_name = testDetails[b"server_name"].decode()
431
431
 
432
+ # Check if artifacts already exist before building
433
+ prefix = f"github_org={github_org}/github_repo={github_repo}/git_branch={str(git_branch)}/git_version={str(git_version)}/git_hash={str(git_hash)}"
434
+
435
+ # Create a comprehensive build signature that includes all build-affecting parameters
436
+ import hashlib
437
+
438
+ build_signature_parts = [
439
+ str(id), # build config ID
440
+ str(build_command), # build command
441
+ str(build_vars_str), # environment variables
442
+ str(compiler), # compiler
443
+ str(cpp_compiler), # C++ compiler
444
+ str(build_image), # build image
445
+ str(build_os), # OS
446
+ str(build_arch), # architecture
447
+ ",".join(sorted(build_artifacts)), # artifacts list
448
+ ]
449
+ build_signature = hashlib.sha256(
450
+ ":".join(build_signature_parts).encode()
451
+ ).hexdigest()[:16]
452
+
453
+ # Check if all artifacts already exist
454
+ all_artifacts_exist = True
455
+ artifact_keys = {}
456
+ for artifact in build_artifacts:
457
+ bin_key = f"zipped:artifacts:{prefix}:{id}:{build_signature}:{artifact}.zip"
458
+ artifact_keys[artifact] = bin_key
459
+ if not conn.exists(bin_key):
460
+ all_artifacts_exist = False
461
+ break
462
+
463
+ if all_artifacts_exist:
464
+ logging.info(
465
+ f"Artifacts for {git_hash}:{id} with build signature {build_signature} already exist, reusing them"
466
+ )
467
+ # Skip build and reuse existing artifacts
468
+ build_stream_fields, result = generate_benchmark_stream_request(
469
+ id,
470
+ conn,
471
+ run_image,
472
+ build_arch,
473
+ testDetails,
474
+ build_os,
475
+ build_artifacts,
476
+ build_command,
477
+ build_config_metadata,
478
+ build_image,
479
+ build_vars_str,
480
+ compiler,
481
+ cpp_compiler,
482
+ git_branch,
483
+ git_hash,
484
+ git_timestamp_ms,
485
+ git_version,
486
+ pull_request,
487
+ None, # redis_temporary_dir not needed for reuse
488
+ tests_groups_regexp,
489
+ tests_priority_lower_limit,
490
+ tests_priority_upper_limit,
491
+ tests_regexp,
492
+ ".*", # command_regexp - default to all commands
493
+ use_git_timestamp,
494
+ server_name,
495
+ github_org,
496
+ github_repo,
497
+ artifact_keys, # Pass existing artifact keys
498
+ )
499
+ continue # Skip to next build spec
500
+
501
+ logging.info(
502
+ f"Building artifacts for {git_hash}:{id} with build signature {build_signature}"
503
+ )
504
+
432
505
  build_start_datetime = datetime.datetime.utcnow()
433
506
  logging.info(
434
507
  "Using the following build command {}.".format(build_command)
@@ -507,6 +580,7 @@ def builder_process_stream(
507
580
  server_name,
508
581
  github_org,
509
582
  github_repo,
583
+ None, # existing_artifact_keys - None for new builds
510
584
  )
511
585
  if result is True:
512
586
  benchmark_stream_id = conn.xadd(
@@ -648,6 +722,7 @@ def generate_benchmark_stream_request(
648
722
  server_name="redis",
649
723
  github_org="redis",
650
724
  github_repo="redis",
725
+ existing_artifact_keys=None,
651
726
  ):
652
727
  build_stream_fields = {
653
728
  "id": id,
@@ -691,21 +766,50 @@ def generate_benchmark_stream_request(
691
766
  if git_timestamp_ms is not None:
692
767
  build_stream_fields["git_timestamp_ms"] = git_timestamp_ms
693
768
 
694
- prefix = f"github_org={github_org}/github_repo={github_repo}/git_branch={str(git_branch)}/git_version={str(git_version)}/git_hash={str(git_hash)}"
695
- for artifact in build_artifacts:
696
- bin_key = f"zipped:artifacts:{prefix}:{id}:{artifact}.zip"
697
- if artifact == "redisearch.so":
698
- bin_artifact = open(
699
- f"{redis_temporary_dir}modules/redisearch/src/bin/linux-x64-release/search-community/{artifact}",
700
- "rb",
701
- ).read()
702
- else:
703
- bin_artifact = open(f"{redis_temporary_dir}src/{artifact}", "rb").read()
704
- bin_artifact_len = len(bytes(bin_artifact))
705
- assert bin_artifact_len > 0
706
- conn.set(bin_key, bytes(bin_artifact), ex=REDIS_BINS_EXPIRE_SECS)
707
- build_stream_fields[artifact] = bin_key
708
- build_stream_fields["{}_len_bytes".format(artifact)] = bin_artifact_len
769
+ if existing_artifact_keys is not None:
770
+ # Use existing artifact keys (for reuse case)
771
+ for artifact in build_artifacts:
772
+ bin_key = existing_artifact_keys[artifact]
773
+ build_stream_fields[artifact] = bin_key
774
+ # Get the length from the existing artifact
775
+ bin_artifact_len = conn.strlen(bin_key)
776
+ build_stream_fields["{}_len_bytes".format(artifact)] = bin_artifact_len
777
+ else:
778
+ # Build new artifacts and store them
779
+ prefix = f"github_org={github_org}/github_repo={github_repo}/git_branch={str(git_branch)}/git_version={str(git_version)}/git_hash={str(git_hash)}"
780
+
781
+ # Create build signature for new artifacts
782
+ import hashlib
783
+
784
+ build_signature_parts = [
785
+ str(id), # build config ID
786
+ str(build_command), # build command
787
+ str(build_vars_str), # environment variables
788
+ str(compiler), # compiler
789
+ str(cpp_compiler), # C++ compiler
790
+ str(build_image), # build image
791
+ str(build_os), # OS
792
+ str(build_arch), # architecture
793
+ ",".join(sorted(build_artifacts)), # artifacts list
794
+ ]
795
+ build_signature = hashlib.sha256(
796
+ ":".join(build_signature_parts).encode()
797
+ ).hexdigest()[:16]
798
+
799
+ for artifact in build_artifacts:
800
+ bin_key = f"zipped:artifacts:{prefix}:{id}:{build_signature}:{artifact}.zip"
801
+ if artifact == "redisearch.so":
802
+ bin_artifact = open(
803
+ f"{redis_temporary_dir}modules/redisearch/src/bin/linux-x64-release/search-community/{artifact}",
804
+ "rb",
805
+ ).read()
806
+ else:
807
+ bin_artifact = open(f"{redis_temporary_dir}src/{artifact}", "rb").read()
808
+ bin_artifact_len = len(bytes(bin_artifact))
809
+ assert bin_artifact_len > 0
810
+ conn.set(bin_key, bytes(bin_artifact), ex=REDIS_BINS_EXPIRE_SECS)
811
+ build_stream_fields[artifact] = bin_key
812
+ build_stream_fields["{}_len_bytes".format(artifact)] = bin_artifact_len
709
813
  result = True
710
814
  if b"platform" in testDetails:
711
815
  build_stream_fields["platform"] = testDetails[b"platform"]
@@ -104,6 +104,12 @@ def trigger_tests_dockerhub_cli_command_logic(args, project_name, project_versio
104
104
  0,
105
105
  10000,
106
106
  args.tests_regexp,
107
+ ".*", # command_regexp
108
+ False, # use_git_timestamp
109
+ "redis", # server_name
110
+ "redis", # github_org
111
+ "redis", # github_repo
112
+ None, # existing_artifact_keys
107
113
  )
108
114
  build_stream_fields["github_repo"] = args.gh_repo
109
115
  build_stream_fields["github_org"] = args.gh_org
@@ -149,11 +149,7 @@ def reset_consumer_group_to_latest(conn, running_platform):
149
149
 
150
150
  try:
151
151
  # Set the consumer group position to '$' (latest) to skip all existing messages
152
- conn.xgroup_setid(
153
- STREAM_KEYNAME_NEW_BUILD_EVENTS,
154
- consumer_group_name,
155
- id="$"
156
- )
152
+ conn.xgroup_setid(STREAM_KEYNAME_NEW_BUILD_EVENTS, consumer_group_name, id="$")
157
153
  logging.info(
158
154
  f"Reset consumer group {consumer_group_name} position to latest - will only process new messages"
159
155
  )
@@ -655,13 +655,17 @@ def main():
655
655
  # Clear pending messages and reset consumer group position by default (unless explicitly skipped)
656
656
  if not args.skip_clear_pending_on_startup:
657
657
  consumer_pos = args.consumer_pos
658
- logging.info("Clearing pending messages and resetting consumer group position on startup (default behavior)")
658
+ logging.info(
659
+ "Clearing pending messages and resetting consumer group position on startup (default behavior)"
660
+ )
659
661
  clear_pending_messages_for_consumer(
660
662
  gh_event_conn, running_platform, consumer_pos
661
663
  )
662
664
  reset_consumer_group_to_latest(gh_event_conn, running_platform)
663
665
  else:
664
- logging.info("Skipping pending message cleanup and consumer group reset as requested")
666
+ logging.info(
667
+ "Skipping pending message cleanup and consumer group reset as requested"
668
+ )
665
669
 
666
670
  stream_id = None
667
671
  docker_client = docker.from_env()
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: redis-benchmarks-specification
3
- Version: 0.1.338
3
+ Version: 0.1.339
4
4
  Summary: The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute.
5
5
  Author: filipecosta90
6
6
  Author-email: filipecosta.90@gmail.com
@@ -4,11 +4,11 @@ redis_benchmarks_specification/__api__/api.py,sha256=k_CMICtMm1z8jY3hByaL0hIr_5v
4
4
  redis_benchmarks_specification/__api__/app.py,sha256=JzQm84DjIVdfLbDO423BJbrds6gFzMbA0syRkHE_aUU,7063
5
5
  redis_benchmarks_specification/__builder__/Readme.md,sha256=O6MV_J3OSgzW-ir2TbukP8Vhkm_LOzQJJndG1Cykqic,111
6
6
  redis_benchmarks_specification/__builder__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
7
- redis_benchmarks_specification/__builder__/builder.py,sha256=lAoEQ8ab9AWstYcpF2hoixZ_HFmMKf9Icwzc0WV0t_I,29867
7
+ redis_benchmarks_specification/__builder__/builder.py,sha256=NiigX_UPeCulCBfrgZvntLIFRfgHe-j43CqWKSlvguk,34555
8
8
  redis_benchmarks_specification/__builder__/schema.py,sha256=1wcmyVJBcWrBvK58pghN9NCoWLCO3BzPsmdKWYfkVog,584
9
9
  redis_benchmarks_specification/__cli__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
10
10
  redis_benchmarks_specification/__cli__/args.py,sha256=X7VlHJvX3n85ZPUQFoovmaFDnY4t7irUrDLf07QAfaA,7430
11
- redis_benchmarks_specification/__cli__/cli.py,sha256=iTjINQ-RV_q2ovq1neSoRCAggpGdeP5mX3_1aFxSScY,22001
11
+ redis_benchmarks_specification/__cli__/cli.py,sha256=GMd_Swn6HA8JAFd7hokLsLO3_F2qnd_2eavVQ66M1lk,22204
12
12
  redis_benchmarks_specification/__cli__/stats.py,sha256=r9JIfwGCSR3maozYbDZfZrkthNFQSs0xIymS86yZ6Iw,55574
13
13
  redis_benchmarks_specification/__common__/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
14
14
  redis_benchmarks_specification/__common__/builder_schema.py,sha256=kfDpRIk7NkJrb5qj9jzsBhLVNO7K_W2Clumj4pxrkG8,5938
@@ -36,8 +36,8 @@ redis_benchmarks_specification/__self_contained_coordinator__/cpuset.py,sha256=s
36
36
  redis_benchmarks_specification/__self_contained_coordinator__/docker.py,sha256=09SyAfqlzs1KG9ZAajClNWtiNk4Jqzd--4-m3n1rLjU,3156
37
37
  redis_benchmarks_specification/__self_contained_coordinator__/post_processing.py,sha256=sVLKNnWdAqYY9DjVdqRC5tDaIrVSaI3Ca7w8-DQ-LRM,776
38
38
  redis_benchmarks_specification/__self_contained_coordinator__/prepopulation.py,sha256=1UeFr2T1ZQBcHCSd4W1ZtaWgXyFPfjLyDi_DgDc1eTA,2957
39
- redis_benchmarks_specification/__self_contained_coordinator__/runners.py,sha256=CXCyYb5G8IBHrm_6Czi2ArPQO7Pd1NJPGUYn836WadE,32872
40
- redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=Jpcby63kpVAOx0HiYzehJd6-gS5b-LNnUz0TRAdQ48M,112752
39
+ redis_benchmarks_specification/__self_contained_coordinator__/runners.py,sha256=1bpGiybmeQrdHh-z-fAyMvzOggZk4_MNHMTWrAfGwQU,32826
40
+ redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=BhOSyinsF8AkaPtR5jjB8vj3VTC0UwyBtCY7iGkWw_I,112796
41
41
  redis_benchmarks_specification/__setups__/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
42
42
  redis_benchmarks_specification/__setups__/topologies.py,sha256=xQ1IJkcTji_ZjLiJd3vOxZpvbNtBLZw9cPkw5hGJKHU,481
43
43
  redis_benchmarks_specification/__spec__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
@@ -282,8 +282,8 @@ redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-st
282
282
  redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-storage-1k-sessions.yml,sha256=2egtIxPxCze2jlbAfgsk4v9JSQHNMoPLbDWFEW8olDg,7006
283
283
  redis_benchmarks_specification/test-suites/template.txt,sha256=ezqGiRPOvuSDO0iG7GEf-AGXNfHbgXI89_G0RUEzL88,481
284
284
  redis_benchmarks_specification/vector-search-test-suites/vector_db_benchmark_test.yml,sha256=PD7ow-k4Ll2BkhEC3aIqiaCZt8Hc4aJIp96Lw3J3mcI,791
285
- redis_benchmarks_specification-0.1.338.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
286
- redis_benchmarks_specification-0.1.338.dist-info/METADATA,sha256=Ak3-Pw7UjP7ftlTuKAivQBA_MuZhMBIRQNFsqI6XLiM,22768
287
- redis_benchmarks_specification-0.1.338.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
288
- redis_benchmarks_specification-0.1.338.dist-info/entry_points.txt,sha256=x5WBXCZsnDRTZxV7SBGmC65L2k-ygdDOxV8vuKN00Nk,715
289
- redis_benchmarks_specification-0.1.338.dist-info/RECORD,,
285
+ redis_benchmarks_specification-0.1.339.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
286
+ redis_benchmarks_specification-0.1.339.dist-info/METADATA,sha256=xFGR8MNuEXNuxwQJKb2SRDKcXKkWeN64LPxQpD4RG5o,22768
287
+ redis_benchmarks_specification-0.1.339.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
288
+ redis_benchmarks_specification-0.1.339.dist-info/entry_points.txt,sha256=x5WBXCZsnDRTZxV7SBGmC65L2k-ygdDOxV8vuKN00Nk,715
289
+ redis_benchmarks_specification-0.1.339.dist-info/RECORD,,