redis-benchmarks-specification 0.1.339__py3-none-any.whl → 0.1.340__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of redis-benchmarks-specification might be problematic.
- redis_benchmarks_specification/__builder__/builder.py +102 -0
- redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py +21 -13
- {redis_benchmarks_specification-0.1.339.dist-info → redis_benchmarks_specification-0.1.340.dist-info}/METADATA +1 -1
- {redis_benchmarks_specification-0.1.339.dist-info → redis_benchmarks_specification-0.1.340.dist-info}/RECORD +7 -7
- {redis_benchmarks_specification-0.1.339.dist-info → redis_benchmarks_specification-0.1.340.dist-info}/LICENSE +0 -0
- {redis_benchmarks_specification-0.1.339.dist-info → redis_benchmarks_specification-0.1.340.dist-info}/WHEEL +0 -0
- {redis_benchmarks_specification-0.1.339.dist-info → redis_benchmarks_specification-0.1.340.dist-info}/entry_points.txt +0 -0
redis_benchmarks_specification/__builder__/builder.py

@@ -47,6 +47,69 @@ from redis_benchmarks_specification.__common__.package import (
 PERFORMANCE_GH_TOKEN = os.getenv("PERFORMANCE_GH_TOKEN", None)


+def clear_pending_messages_for_builder_consumer(conn, builder_group, builder_id):
+    """Clear all pending messages for a specific builder consumer on startup"""
+    consumer_name = f"{builder_group}-proc#{builder_id}"
+
+    try:
+        # Get pending messages for this specific consumer
+        pending_info = conn.xpending_range(
+            STREAM_KEYNAME_GH_EVENTS_COMMIT,
+            builder_group,
+            min="-",
+            max="+",
+            count=1000,  # Get up to 1000 pending messages
+            consumername=consumer_name,
+        )
+
+        if pending_info:
+            message_ids = [msg["message_id"] for msg in pending_info]
+            logging.info(
+                f"Found {len(message_ids)} pending messages for builder consumer {consumer_name}. Clearing them..."
+            )
+
+            # Acknowledge all pending messages to clear them
+            ack_count = conn.xack(
+                STREAM_KEYNAME_GH_EVENTS_COMMIT, builder_group, *message_ids
+            )
+
+            logging.info(
+                f"Successfully cleared {ack_count} pending messages for builder consumer {consumer_name}"
+            )
+        else:
+            logging.info(
+                f"No pending messages found for builder consumer {consumer_name}"
+            )
+
+    except redis.exceptions.ResponseError as e:
+        if "NOGROUP" in str(e):
+            logging.info(f"Builder consumer group {builder_group} does not exist yet")
+        else:
+            logging.warning(f"Error clearing pending messages: {e}")
+    except Exception as e:
+        logging.error(f"Unexpected error clearing pending messages: {e}")
+
+
+def reset_builder_consumer_group_to_latest(conn, builder_group):
+    """Reset the builder consumer group position to only read new messages (skip old ones)"""
+    try:
+        # Set the consumer group position to '$' (latest) to skip all existing messages
+        conn.xgroup_setid(STREAM_KEYNAME_GH_EVENTS_COMMIT, builder_group, id="$")
+        logging.info(
+            f"Reset builder consumer group {builder_group} position to latest - will only process new messages"
+        )
+
+    except redis.exceptions.ResponseError as e:
+        if "NOGROUP" in str(e):
+            logging.info(f"Builder consumer group {builder_group} does not exist yet")
+        else:
+            logging.warning(f"Error resetting builder consumer group position: {e}")
+    except Exception as e:
+        logging.error(
+            f"Unexpected error resetting builder consumer group position: {e}"
+        )
+
+
 class ZipFileWithPermissions(ZipFile):
     def _extract_member(self, member, targetpath, pwd):
         if not isinstance(member, ZipInfo):
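The two helpers added above use only standard Redis streams consumer-group commands as exposed by redis-py (XPENDING with a consumer filter, XACK, XGROUP SETID). Below is a minimal, self-contained sketch of the same clear-then-skip-to-latest startup pattern, assuming a local Redis and throwaway demo stream/group names (hypothetical stand-ins, not the package's own constants):

import redis

r = redis.StrictRedis(host="localhost", port=6379, decode_responses=True)
stream = "demo:gh_events"      # hypothetical stand-in for STREAM_KEYNAME_GH_EVENTS_COMMIT
group = "demo-builders-cg"     # hypothetical stand-in for the builder consumer group
consumer = f"{group}-proc#1"   # same naming shape as clear_pending_messages_for_builder_consumer

# Create the group if needed, then simulate a crash: read one entry and never ack it.
try:
    r.xgroup_create(stream, group, id="0", mkstream=True)
except redis.exceptions.ResponseError as e:
    if "BUSYGROUP" not in str(e):
        raise
r.xadd(stream, {"tag": "stale-build-request"})
r.xreadgroup(group, consumer, {stream: ">"}, count=1)

# Startup recovery step 1: acknowledge everything still pending for this consumer.
pending = r.xpending_range(stream, group, min="-", max="+", count=1000, consumername=consumer)
if pending:
    r.xack(stream, group, *[p["message_id"] for p in pending])

# Startup recovery step 2: move the group's last-delivered id to '$' so only new entries are read.
r.xgroup_setid(stream, group, id="$")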
@@ -104,6 +167,12 @@ def main():
     )
     parser.add_argument("--github_token", type=str, default=PERFORMANCE_GH_TOKEN)
     parser.add_argument("--pull-request", type=str, default=None, nargs="?", const="")
+    parser.add_argument(
+        "--skip-clear-pending-on-startup",
+        default=False,
+        action="store_true",
+        help="Skip automatically clearing pending messages and resetting consumer group position on startup. By default, pending messages are cleared and consumer group is reset to latest position to skip old work and recover from crashes.",
+    )
     args = parser.parse_args()
     if args.logname is not None:
         print("Writting log to {}".format(args.logname))
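For reference, the new flag is a plain store_true option, so the cleanup behaviour used in the next hunk is opt-out: it runs unless the operator passes the flag. A tiny standalone sketch of that default/override behaviour (argparse only, no package code involved):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--skip-clear-pending-on-startup", default=False, action="store_true")

# Without the flag the attribute stays False, so the builder clears pending work on startup.
print(parser.parse_args([]).skip_clear_pending_on_startup)                                   # False
# Passing the flag flips it to True, and the cleanup/reset block in main() is skipped.
print(parser.parse_args(["--skip-clear-pending-on-startup"]).skip_clear_pending_on_startup)  # True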
@@ -169,6 +238,19 @@ def main():
     builder_id = "1"

     builder_consumer_group_create(conn, builder_group)
+
+    # Clear pending messages and reset consumer group position by default (unless explicitly skipped)
+    if not args.skip_clear_pending_on_startup:
+        logging.info(
+            "Clearing pending messages and resetting builder consumer group position on startup (default behavior)"
+        )
+        clear_pending_messages_for_builder_consumer(conn, builder_group, builder_id)
+        reset_builder_consumer_group_to_latest(conn, builder_group)
+    else:
+        logging.info(
+            "Skipping pending message cleanup and builder consumer group reset as requested"
+        )
+
     if args.github_token is not None:
         logging.info("detected a github token. will update as much as possible!!! =)")
     previous_id = args.consumer_start_id
@@ -268,6 +350,26 @@ def builder_process_stream(
                     build_request_arch, arch
                 )
             )
+            # Acknowledge the message even though we're skipping it
+            ack_reply = conn.xack(
+                STREAM_KEYNAME_GH_EVENTS_COMMIT,
+                STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
+                streamId,
+            )
+            if type(ack_reply) == bytes:
+                ack_reply = ack_reply.decode()
+            if ack_reply == "1" or ack_reply == 1:
+                logging.info(
+                    "Successfully acknowledged build variation stream with id {} (filtered by arch).".format(
+                        streamId
+                    )
+                )
+            else:
+                logging.error(
+                    "Unable to acknowledge build variation stream with id {}. XACK reply {}".format(
+                        streamId, ack_reply
+                    )
+                )
             return previous_id, new_builds_count, build_stream_fields_arr

     home = str(Path.home())
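Both new acknowledgement paths (the arch-filter skip above and the coordinator change below) key their logging off the XACK reply. XACK returns the number of entries actually removed from the group's pending list, which redis-py surfaces as an integer; a quick sketch of the two outcomes the code distinguishes, using hypothetical demo names rather than the package's constants:

import redis

r = redis.StrictRedis(host="localhost", port=6379, decode_responses=True)
r.xadd("demo:acks", {"tag": "build-request"})
r.xgroup_create("demo:acks", "demo-cg", id="0", mkstream=True)
entries = r.xreadgroup("demo-cg", "builder-1", {"demo:acks": ">"}, count=1)
stream_id = entries[0][1][0][0]

print(r.xack("demo:acks", "demo-cg", stream_id))  # 1 -> entry was pending and is now acknowledged
print(r.xack("demo:acks", "demo-cg", stream_id))  # 0 -> nothing left to acknowledge for that id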
redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py

@@ -872,26 +872,34 @@ def self_contained_coordinator_blocking_read(
             )
             num_process_streams = num_process_streams + 1
             num_process_test_suites = num_process_test_suites + total_test_suite_runs
-
-
-
-
-
-
-
-
-
+
+            # Always acknowledge the message, even if it was filtered out
+            ack_reply = github_event_conn.xack(
+                STREAM_KEYNAME_NEW_BUILD_EVENTS,
+                get_runners_consumer_group_name(platform_name),
+                stream_id,
+            )
+            if type(ack_reply) == bytes:
+                ack_reply = ack_reply.decode()
+            if ack_reply == "1" or ack_reply == 1:
+                if overall_result is True:
                     logging.info(
-                        "
+                        "Successfully acknowledged BENCHMARK variation stream with id {} (processed).".format(
                             stream_id
                         )
                     )
                 else:
-                    logging.
-                        "
-                            stream_id
+                    logging.info(
+                        "Successfully acknowledged BENCHMARK variation stream with id {} (filtered/skipped).".format(
+                            stream_id
                         )
                     )
+            else:
+                logging.error(
+                    "Unable to acknowledge build variation stream with id {}. XACK reply {}".format(
+                        stream_id, ack_reply
+                    )
+                )
         return overall_result, stream_id, num_process_streams, num_process_test_suites

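The coordinator change has the same motivation as the builder one: an entry read with XREADGROUP but never acknowledged stays in the group's pending entries list and keeps showing up in XPENDING, so filtered or skipped work would otherwise accumulate as "pending" forever. A small sketch of that accumulation and how an ack clears it, again with hypothetical demo names:

import redis

r = redis.StrictRedis(host="localhost", port=6379, decode_responses=True)
r.xadd("demo:new_builds", {"arch": "arm64"})
r.xgroup_create("demo:new_builds", "demo-runners-cg", id="0", mkstream=True)

entries = r.xreadgroup("demo-runners-cg", "runner-1", {"demo:new_builds": ">"}, count=1)
stream_id = entries[0][1][0][0]

print(r.xpending("demo:new_builds", "demo-runners-cg")["pending"])  # 1: read but not yet acked
r.xack("demo:new_builds", "demo-runners-cg", stream_id)
print(r.xpending("demo:new_builds", "demo-runners-cg")["pending"])  # 0: cleared after the ack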
{redis_benchmarks_specification-0.1.339.dist-info → redis_benchmarks_specification-0.1.340.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: redis-benchmarks-specification
-Version: 0.1.339
+Version: 0.1.340
 Summary: The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute.
 Author: filipecosta90
 Author-email: filipecosta.90@gmail.com
{redis_benchmarks_specification-0.1.339.dist-info → redis_benchmarks_specification-0.1.340.dist-info}/RECORD

@@ -4,7 +4,7 @@ redis_benchmarks_specification/__api__/api.py,sha256=k_CMICtMm1z8jY3hByaL0hIr_5v
 redis_benchmarks_specification/__api__/app.py,sha256=JzQm84DjIVdfLbDO423BJbrds6gFzMbA0syRkHE_aUU,7063
 redis_benchmarks_specification/__builder__/Readme.md,sha256=O6MV_J3OSgzW-ir2TbukP8Vhkm_LOzQJJndG1Cykqic,111
 redis_benchmarks_specification/__builder__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
-redis_benchmarks_specification/__builder__/builder.py,sha256=
+redis_benchmarks_specification/__builder__/builder.py,sha256=5GhZ76itxcXfaHr7YtNrJp2zcYC_XHskpqZjIbMh3UI,38862
 redis_benchmarks_specification/__builder__/schema.py,sha256=1wcmyVJBcWrBvK58pghN9NCoWLCO3BzPsmdKWYfkVog,584
 redis_benchmarks_specification/__cli__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
 redis_benchmarks_specification/__cli__/args.py,sha256=X7VlHJvX3n85ZPUQFoovmaFDnY4t7irUrDLf07QAfaA,7430

@@ -37,7 +37,7 @@ redis_benchmarks_specification/__self_contained_coordinator__/docker.py,sha256=0
 redis_benchmarks_specification/__self_contained_coordinator__/post_processing.py,sha256=sVLKNnWdAqYY9DjVdqRC5tDaIrVSaI3Ca7w8-DQ-LRM,776
 redis_benchmarks_specification/__self_contained_coordinator__/prepopulation.py,sha256=1UeFr2T1ZQBcHCSd4W1ZtaWgXyFPfjLyDi_DgDc1eTA,2957
 redis_benchmarks_specification/__self_contained_coordinator__/runners.py,sha256=1bpGiybmeQrdHh-z-fAyMvzOggZk4_MNHMTWrAfGwQU,32826
-redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=
+redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=vtBJzzH6eq2v7M7EK1Ve5EZyUfpgSFKTHQfIUY8agFE,113065
 redis_benchmarks_specification/__setups__/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 redis_benchmarks_specification/__setups__/topologies.py,sha256=xQ1IJkcTji_ZjLiJd3vOxZpvbNtBLZw9cPkw5hGJKHU,481
 redis_benchmarks_specification/__spec__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83

@@ -282,8 +282,8 @@ redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-st
 redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-storage-1k-sessions.yml,sha256=2egtIxPxCze2jlbAfgsk4v9JSQHNMoPLbDWFEW8olDg,7006
 redis_benchmarks_specification/test-suites/template.txt,sha256=ezqGiRPOvuSDO0iG7GEf-AGXNfHbgXI89_G0RUEzL88,481
 redis_benchmarks_specification/vector-search-test-suites/vector_db_benchmark_test.yml,sha256=PD7ow-k4Ll2BkhEC3aIqiaCZt8Hc4aJIp96Lw3J3mcI,791
-redis_benchmarks_specification-0.1.
-redis_benchmarks_specification-0.1.
-redis_benchmarks_specification-0.1.
-redis_benchmarks_specification-0.1.
-redis_benchmarks_specification-0.1.
+redis_benchmarks_specification-0.1.340.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+redis_benchmarks_specification-0.1.340.dist-info/METADATA,sha256=DDrmuBuSVQ7qSeKz2z-mgLP8oNV0DNDuAfO3a_oxZ0I,22768
+redis_benchmarks_specification-0.1.340.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+redis_benchmarks_specification-0.1.340.dist-info/entry_points.txt,sha256=x5WBXCZsnDRTZxV7SBGmC65L2k-ygdDOxV8vuKN00Nk,715
+redis_benchmarks_specification-0.1.340.dist-info/RECORD,,