redis-benchmarks-specification 0.1.328__py3-none-any.whl → 0.1.330__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of redis-benchmarks-specification has been flagged as potentially problematic by the registry.
- redis_benchmarks_specification/__common__/runner.py +2 -7
- redis_benchmarks_specification/__common__/suppress_warnings.py +20 -0
- redis_benchmarks_specification/__common__/timeseries.py +54 -30
- redis_benchmarks_specification/__runner__/runner.py +114 -4
- redis_benchmarks_specification/__self_contained_coordinator__/args.py +30 -0
- redis_benchmarks_specification/__self_contained_coordinator__/runners.py +3 -0
- redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py +272 -4
- {redis_benchmarks_specification-0.1.328.dist-info → redis_benchmarks_specification-0.1.330.dist-info}/METADATA +1 -1
- {redis_benchmarks_specification-0.1.328.dist-info → redis_benchmarks_specification-0.1.330.dist-info}/RECORD +12 -11
- {redis_benchmarks_specification-0.1.328.dist-info → redis_benchmarks_specification-0.1.330.dist-info}/LICENSE +0 -0
- {redis_benchmarks_specification-0.1.328.dist-info → redis_benchmarks_specification-0.1.330.dist-info}/WHEEL +0 -0
- {redis_benchmarks_specification-0.1.328.dist-info → redis_benchmarks_specification-0.1.330.dist-info}/entry_points.txt +0 -0
redis_benchmarks_specification/__common__/runner.py

@@ -291,13 +291,7 @@ def export_redis_metrics(
                     metric_name,
                     metric_value,
                 ) in overall_end_time_metrics.items():
-                    tsname_metric = "{}/{}/{}/benchmark_end/{}/{}".format(
-                        sprefix,
-                        test_name,
-                        by_variant,
-                        setup_name,
-                        metric_name,
-                    )
+                    tsname_metric = f"{sprefix}/{test_name}/{by_variant}/benchmark_end/{running_platform}/{setup_name}/{metric_name}"
 
                     logging.debug(
                         "Adding a redis server side metric collected at the end of benchmark."
@@ -404,6 +398,7 @@ def exporter_datasink_common(
         running_platform,
         None,
         git_hash,
+        disable_target_tables=True,
     )
     if collect_memory_metrics:
         logging.info("Collecting memory metrics")
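For illustration, with made-up values for every variable involved, the rewritten f-string produces end-of-benchmark keys of the following shape (note the added running_platform segment):

# All values below are hypothetical; they only illustrate the key layout.
sprefix = "ci.benchmarks.example"
test_name = "memtier_benchmark-1Mkeys-string-get"
by_variant = "gcc:8.5.0-amd64-debian-buster-default"
running_platform = "example-platform"
setup_name = "oss-standalone"
metric_name = "used_memory"

tsname_metric = f"{sprefix}/{test_name}/{by_variant}/benchmark_end/{running_platform}/{setup_name}/{metric_name}"
print(tsname_metric)
# ci.benchmarks.example/memtier_benchmark-1Mkeys-string-get/gcc:8.5.0-amd64-debian-buster-default/benchmark_end/example-platform/oss-standalone/used_memory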
redis_benchmarks_specification/__common__/suppress_warnings.py (new file)

@@ -0,0 +1,20 @@
+"""
+Warning suppression module that should be imported first to suppress known warnings.
+"""
+
+import warnings
+
+# Suppress cryptography deprecation warnings from paramiko
+warnings.filterwarnings("ignore", category=DeprecationWarning, module="paramiko")
+warnings.filterwarnings("ignore", message=".*TripleDES.*", category=DeprecationWarning)
+warnings.filterwarnings(
+    "ignore", message=".*cryptography.*", category=DeprecationWarning
+)
+
+# Also suppress the specific CryptographyDeprecationWarning if it exists
+try:
+    from cryptography.utils import CryptographyDeprecationWarning
+
+    warnings.filterwarnings("ignore", category=CryptographyDeprecationWarning)
+except ImportError:
+    pass
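As the hunks for __runner__/runner.py and self_contained_coordinator.py further below show, the module is meant to be star-imported before anything else. A minimal sketch of that import order, assuming paramiko is installed:

# Import warning suppression first, before any module that pulls in paramiko/cryptography.
from redis_benchmarks_specification.__common__.suppress_warnings import *  # noqa: F401,F403

import paramiko  # on affected versions this import would otherwise emit CryptographyDeprecationWarning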
redis_benchmarks_specification/__common__/timeseries.py

@@ -838,34 +838,47 @@ def common_exporter_logic(
             and artifact_version != ""
             and artifact_version != "N/A"
         ):
-            #
-            (the remaining 27 removed lines are not preserved in this diff view)
+            # Check if version 255.255.255 should only be pushed for unstable branch
+            should_push_version = True
+            if artifact_version == "255.255.255":
+                if tf_github_branch != "unstable":
+                    logging.info(
+                        f"Skipping version 255.255.255 data push for branch '{tf_github_branch}' "
+                        f"(only pushing for 'unstable' branch)"
+                    )
+                    should_push_version = False
+                else:
+                    logging.info(f"Pushing version 255.255.255 data for unstable branch")
+
+            if should_push_version:
+                # extract per-version datapoints
+                total_hs_ts = len(per_hash_time_series_dict.keys())
+                logging.info(
+                    f"Extending the by.hash {git_hash} timeseries ({total_hs_ts}) with version info {artifact_version}"
+                )
+                for hash_timeserie in per_hash_time_series_dict.values():
+                    hash_timeserie["labels"]["version"] = artifact_version
+                (
+                    _,
+                    per_version_time_series_dict,
+                    version_target_tables,
+                ) = extract_perversion_timeseries_from_results(
+                    used_ts,
+                    metrics,
+                    results_dict,
+                    artifact_version,
+                    tf_github_org,
+                    tf_github_repo,
+                    deployment_name,
+                    deployment_type,
+                    test_name,
+                    tf_triggering_env,
+                    metadata_tags,
+                    build_variant_name,
+                    running_platform,
+                    testcase_metric_context_paths,
+                )
+                total_break_by_added += 1
         else:
             logging.warning(
                 "there was no git VERSION information to push data brokedown by VERSION"
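The new guard distilled into a standalone sketch (names copied from the hunk above; illustrative only, not the shipped code):

def should_push_per_version_data(artifact_version: str, tf_github_branch: str) -> bool:
    # Redis dev/unstable builds report the placeholder version 255.255.255;
    # per-version datapoints for that version are only pushed from the unstable branch.
    if artifact_version == "255.255.255" and tf_github_branch != "unstable":
        return False
    return True


print(should_push_per_version_data("255.255.255", "feature-x"))  # False
print(should_push_per_version_data("255.255.255", "unstable"))   # True
print(should_push_per_version_data("8.0.0", "feature-x"))        # True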
@@ -1118,6 +1131,7 @@ def add_standardized_metric_byversion(
         tf_triggering_env,
         metadata_tags,
         build_variant_name,
+        running_platform,
     )
     labels["version"] = artifact_version
     labels["deployment_name+version"] = "{} {}".format(
@@ -1169,6 +1183,7 @@ def timeseries_test_sucess_flow(
     running_platform=None,
     timeseries_dict=None,
     git_hash=None,
+    disable_target_tables=False,
 ):
     testcase_metric_context_paths = []
     version_target_tables = None
@@ -1205,7 +1220,7 @@ def timeseries_test_sucess_flow(
             )
         )
         push_data_to_redistimeseries(rts, timeseries_dict)
-    if version_target_tables is not None:
+    if not disable_target_tables and version_target_tables is not None:
         logging.info(
             "There are a total of {} distinct target tables by version".format(
                 len(version_target_tables.keys())
@@ -1225,7 +1240,12 @@ def timeseries_test_sucess_flow(
                 rts.hset(
                     version_target_table_keyname, None, None, version_target_table_dict
                 )
-    if branch_target_tables is not None:
+    elif disable_target_tables:
+        logging.info(
+            "Target tables disabled - skipping version target table creation"
+        )
+
+    if not disable_target_tables and branch_target_tables is not None:
         logging.info(
             "There are a total of {} distinct target tables by branch".format(
                 len(branch_target_tables.keys())
@@ -1246,6 +1266,10 @@ def timeseries_test_sucess_flow(
                 rts.hset(
                     branch_target_table_keyname, None, None, branch_target_table_dict
                 )
+    elif disable_target_tables:
+        logging.info(
+            "Target tables disabled - skipping branch target table creation"
+        )
     if test_name is not None:
         if type(test_name) is str:
             update_secondary_result_keys(
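Taken together with the exporter_datasink_common hunk above, the flag only gates the target-table hash writes; the timeseries datapoints themselves are still pushed. A simplified, hypothetical condensation of the new guards (the helper shape is invented for illustration, the hset call pattern mirrors the surrounding code):

import logging


def write_target_tables(rts, disable_target_tables, version_target_tables, branch_target_tables):
    # Hypothetical condensation of the guards added to timeseries_test_sucess_flow.
    for kind, tables in (("version", version_target_tables), ("branch", branch_target_tables)):
        if not disable_target_tables and tables is not None:
            for keyname, table_dict in tables.items():
                rts.hset(keyname, None, None, table_dict)
        elif disable_target_tables:
            logging.info(f"Target tables disabled - skipping {kind} target table creation")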
redis_benchmarks_specification/__runner__/runner.py

@@ -1,3 +1,6 @@
+# Import warning suppression first
+from redis_benchmarks_specification.__common__.suppress_warnings import *
+
 import datetime
 import json
 import logging
@@ -19,15 +22,11 @@ from docker.models.containers import Container
 from pytablewriter import CsvTableWriter, MarkdownTableWriter
 from redisbench_admin.profilers.profilers_local import (
     check_compatible_system_and_kernel_and_prepare_profile,
-    local_profilers_platform_checks,
-    profilers_start_if_required,
-    profilers_stop_if_required,
 )
 from redisbench_admin.run.common import (
     get_start_time_vars,
     merge_default_and_config_metrics,
     prepare_benchmark_parameters,
-    dbconfig_keyspacelen_check,
 )
 
 from redis_benchmarks_specification.__common__.runner import (
@@ -158,6 +157,83 @@ def parse_redis_uri(uri):
     return {}
 
 
+def validate_benchmark_metrics(
+    results_dict, test_name, benchmark_config=None, default_metrics=None
+):
+    """
+    Validate benchmark metrics to ensure they contain reasonable values.
+    Fails the test if critical metrics indicate something is wrong.
+
+    Args:
+        results_dict: Dictionary containing benchmark results
+        test_name: Name of the test being validated
+        benchmark_config: Benchmark configuration (unused, for compatibility)
+        default_metrics: Default metrics configuration (unused, for compatibility)
+
+    Returns:
+        tuple: (is_valid, error_message)
+    """
+    try:
+        # Define validation rules
+        throughput_patterns = [
+            "ops/sec",
+            "qps",
+            "totals.ops/sec",
+            "all_stats.totals.ops/sec",
+        ]
+
+        latency_patterns = ["latency", "p50", "p95", "p99", "p999", "usec", "msec"]
+
+        validation_errors = []
+
+        def check_nested_dict(data, path=""):
+            """Recursively check nested dictionary for metrics"""
+            if isinstance(data, dict):
+                for key, value in data.items():
+                    current_path = f"{path}.{key}" if path else key
+                    check_nested_dict(value, current_path)
+            elif isinstance(data, (int, float)):
+                metric_path_lower = path.lower()
+
+                # Check throughput metrics
+                for pattern in throughput_patterns:
+                    if pattern in metric_path_lower:
+                        if data <= 10:  # Below 10 QPS threshold
+                            validation_errors.append(
+                                f"Throughput metric '{path}' has invalid value: {data} "
+                                f"(below 10 QPS threshold)"
+                            )
+                        break
+
+                # Check latency metrics
+                for pattern in latency_patterns:
+                    if pattern in metric_path_lower:
+                        if data <= 0.0:  # Invalid latency
+                            validation_errors.append(
+                                f"Latency metric '{path}' has invalid value: {data} "
+                                f"(should be > 0.0)"
+                            )
+                        break
+
+        # Validate the results dictionary
+        check_nested_dict(results_dict)
+
+        if validation_errors:
+            error_msg = f"Test {test_name} failed metric validation:\n" + "\n".join(
+                validation_errors
+            )
+            logging.error(error_msg)
+            return False, error_msg
+
+        logging.info(f"Test {test_name} passed metric validation")
+        return True, None
+
+    except Exception as e:
+        logging.warning(f"Error during metric validation for test {test_name}: {e}")
+        # Don't fail the test if validation itself fails
+        return True, None
+
+
 def run_local_command_with_timeout(command_str, timeout_seconds, description="command"):
     """
     Run a local command with timeout support.
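A usage sketch of the new helper (the results dictionary below is illustrative, memtier-style, not taken from a real run):

import logging

from redis_benchmarks_specification.__runner__.runner import validate_benchmark_metrics

logging.basicConfig(level=logging.INFO)

# Illustrative results: a run whose throughput collapsed to 0 ops/sec.
results_dict = {
    "ALL STATS": {
        "Totals": {
            "Ops/sec": 0.0,
            "Latency": 1.234,
        }
    }
}

is_valid, error = validate_benchmark_metrics(results_dict, "memtier_benchmark-example-test")
print(is_valid)  # False: the "ops/sec" path is at or below the 10 QPS threshold
print(error)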
@@ -2753,6 +2829,23 @@ def process_self_contained_coordinator_stream(
                         "Using aggregated JSON results from multi-client execution"
                     )
                     results_dict = json.loads(client_container_stdout)
+
+                    # Validate benchmark metrics
+                    is_valid, validation_error = validate_benchmark_metrics(
+                        results_dict, test_name, benchmark_config, default_metrics
+                    )
+                    if not is_valid:
+                        logging.error(
+                            f"Test {test_name} failed metric validation: {validation_error}"
+                        )
+                        test_result = False
+                        delete_temporary_files(
+                            temporary_dir_client=temporary_dir_client,
+                            full_result_path=full_result_path,
+                            benchmark_tool_global=benchmark_tool_global,
+                        )
+                        continue
+
                     # Print results table for multi-client
                     print_results_table_stdout(
                         benchmark_config,
@@ -2815,6 +2908,23 @@ def process_self_contained_coordinator_stream(
                         "r",
                     ) as json_file:
                         results_dict = json.load(json_file)
+
+                        # Validate benchmark metrics
+                        is_valid, validation_error = validate_benchmark_metrics(
+                            results_dict, test_name, benchmark_config, default_metrics
+                        )
+                        if not is_valid:
+                            logging.error(
+                                f"Test {test_name} failed metric validation: {validation_error}"
+                            )
+                            test_result = False
+                            delete_temporary_files(
+                                temporary_dir_client=temporary_dir_client,
+                                full_result_path=full_result_path,
+                                benchmark_tool_global=benchmark_tool_global,
+                            )
+                            continue
+
                     print_results_table_stdout(
                         benchmark_config,
                         default_metrics,
redis_benchmarks_specification/__self_contained_coordinator__/args.py

@@ -28,6 +28,12 @@ def create_self_contained_coordinator_args(project_name):
         description=project_name,
         formatter_class=argparse.ArgumentDefaultsHelpFormatter,
     )
+    parser.add_argument(
+        "--version",
+        action="version",
+        version=project_name,
+        help="Show version information and exit",
+    )
     parser.add_argument("--event_stream_host", type=str, default=GH_REDIS_SERVER_HOST)
     parser.add_argument("--event_stream_port", type=int, default=GH_REDIS_SERVER_PORT)
     parser.add_argument("--event_stream_pass", type=str, default=GH_REDIS_SERVER_AUTH)
@@ -171,4 +177,28 @@ def create_self_contained_coordinator_args(project_name):
         default="",
         help="Filter tests to run only with the specified topology (e.g. oss-standalone)",
     )
+    parser.add_argument(
+        "--exclusive-hardware",
+        default=False,
+        action="store_true",
+        help="Enable exclusive hardware mode. Kills all memtier processes and stops all docker containers before and after each test.",
+    )
+    parser.add_argument(
+        "--http-port",
+        type=int,
+        default=8080,
+        help="Port for HTTP server endpoints (/ping health check and /reset-queue POST endpoint).",
+    )
+    parser.add_argument(
+        "--http-auth-username",
+        type=str,
+        default=None,
+        help="Username for HTTP endpoint authentication. HTTP server is disabled if not provided.",
+    )
+    parser.add_argument(
+        "--http-auth-password",
+        type=str,
+        default=None,
+        help="Password for HTTP endpoint authentication. HTTP server is disabled if not provided.",
+    )
     return parser
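A quick sketch of how the new flags surface through the existing argument builder, assuming the remaining options keep their defaults (the credential values are placeholders):

from redis_benchmarks_specification.__self_contained_coordinator__.args import (
    create_self_contained_coordinator_args,
)

parser = create_self_contained_coordinator_args("redis-benchmarks-spec coordinator example")
args = parser.parse_args(
    [
        "--exclusive-hardware",
        "--http-port", "8080",
        "--http-auth-username", "coordinator",
        "--http-auth-password", "change-me",
    ]
)
print(args.exclusive_hardware, args.http_port)  # True 8080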
redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py

@@ -1,9 +1,14 @@
+# Import warning suppression first
+from redis_benchmarks_specification.__common__.suppress_warnings import *
+
 import datetime
 import json
 import logging
 import pathlib
 import shutil
+import subprocess
 import tempfile
+import threading
 import traceback
 import re
 import docker
@@ -13,6 +18,9 @@ import os
 from pathlib import Path
 import sys
 import time
+import base64
+from http.server import HTTPServer, BaseHTTPRequestHandler
+from urllib.parse import urlparse, parse_qs
 
 from docker.models.containers import Container
 from redis_benchmarks_specification.__self_contained_coordinator__.post_processing import (
@@ -59,6 +67,7 @@ from redis_benchmarks_specification.__compare__.compare import (
 from redis_benchmarks_specification.__runner__.runner import (
     print_results_table_stdout,
     prepare_memtier_benchmark_parameters,
+    validate_benchmark_metrics,
 )
 from redis_benchmarks_specification.__self_contained_coordinator__.args import (
     create_self_contained_coordinator_args,
@@ -108,6 +117,192 @@ from redis_benchmarks_specification.__self_contained_coordinator__.build_info im
     extract_build_info_from_streamdata,
 )
 
+# Global variables for HTTP server control
+_reset_queue_requested = False
+_exclusive_hardware = False
+_http_auth_username = None
+_http_auth_password = None
+
+
+class CoordinatorHTTPHandler(BaseHTTPRequestHandler):
+    """HTTP request handler for coordinator endpoints"""
+
+    def log_message(self, format, *args):
+        """Override to use our logging system"""
+        logging.info(f"HTTP {format % args}")
+
+    def _authenticate(self):
+        """Check if the request is authenticated"""
+        global _http_auth_username, _http_auth_password
+
+        # Check for Authorization header
+        auth_header = self.headers.get("Authorization")
+        if not auth_header:
+            return False
+
+        # Parse Basic auth
+        try:
+            if not auth_header.startswith("Basic "):
+                return False
+
+            # Decode base64 credentials
+            encoded_credentials = auth_header[6:]  # Remove 'Basic ' prefix
+            decoded_credentials = base64.b64decode(encoded_credentials).decode("utf-8")
+            username, password = decoded_credentials.split(":", 1)
+
+            # Verify credentials
+            return username == _http_auth_username and password == _http_auth_password
+
+        except Exception as e:
+            logging.warning(f"Authentication error: {e}")
+            return False
+
+    def _send_auth_required(self):
+        """Send 401 Unauthorized response"""
+        self.send_response(401)
+        self.send_header(
+            "WWW-Authenticate", 'Basic realm="Redis Benchmarks Coordinator"'
+        )
+        self.send_header("Content-type", "application/json")
+        self.end_headers()
+        response = {
+            "error": "Authentication required",
+            "message": "Please provide valid credentials using Basic authentication",
+        }
+        self.wfile.write(json.dumps(response).encode())
+
+    def do_GET(self):
+        """Handle GET requests"""
+        # Check authentication
+        if not self._authenticate():
+            self._send_auth_required()
+            return
+
+        parsed_path = urlparse(self.path)
+
+        if parsed_path.path == "/ping":
+            self.send_response(200)
+            self.send_header("Content-type", "application/json")
+            self.end_headers()
+            response = {
+                "status": "healthy",
+                "timestamp": datetime.datetime.utcnow().isoformat(),
+                "service": "redis-benchmarks-self-contained-coordinator",
+            }
+            self.wfile.write(json.dumps(response).encode())
+        else:
+            self.send_response(404)
+            self.send_header("Content-type", "application/json")
+            self.end_headers()
+            self.wfile.write(json.dumps({"error": "Not found"}).encode())
+
+    def do_POST(self):
+        """Handle POST requests"""
+        # Check authentication
+        if not self._authenticate():
+            self._send_auth_required()
+            return
+
+        global _reset_queue_requested
+
+        parsed_path = urlparse(self.path)
+
+        if parsed_path.path == "/reset-queue":
+            try:
+                # Read request body
+                content_length = int(self.headers.get("Content-Length", 0))
+                if content_length > 0:
+                    post_data = self.rfile.read(content_length)
+                    try:
+                        request_data = json.loads(post_data.decode())
+                    except json.JSONDecodeError:
+                        request_data = {}
+                else:
+                    request_data = {}
+
+                # Set the reset flag
+                _reset_queue_requested = True
+                logging.info("Queue reset requested via HTTP endpoint")
+
+                self.send_response(200)
+                self.send_header("Content-type", "application/json")
+                self.end_headers()
+                response = {
+                    "status": "success",
+                    "message": "Queue reset requested",
+                    "timestamp": datetime.datetime.utcnow().isoformat(),
+                }
+                self.wfile.write(json.dumps(response).encode())
+
+            except Exception as e:
+                logging.error(f"Error handling reset-queue request: {e}")
+                self.send_response(500)
+                self.send_header("Content-type", "application/json")
+                self.end_headers()
+                self.wfile.write(json.dumps({"error": str(e)}).encode())
+        else:
+            self.send_response(404)
+            self.send_header("Content-type", "application/json")
+            self.end_headers()
+            self.wfile.write(json.dumps({"error": "Not found"}).encode())
+
+
+def start_http_server(port=8080):
+    """Start the HTTP server in a separate thread"""
+
+    def run_server():
+        try:
+            server = HTTPServer(("0.0.0.0", port), CoordinatorHTTPHandler)
+            logging.info(f"Starting HTTP server on port {port}")
+            logging.info(f"Available endpoints:")
+            logging.info(f"  GET /ping - Health check")
+            logging.info(
+                f"  POST /reset-queue - Reset pending streams and skip running tests"
+            )
+            server.serve_forever()
+        except Exception as e:
+            logging.error(f"HTTP server error: {e}")
+
+    server_thread = threading.Thread(target=run_server, daemon=True)
+    server_thread.start()
+    return server_thread
+
+
+def cleanup_system_processes():
+    """Clean up memtier processes and docker containers for exclusive hardware mode"""
+    global _exclusive_hardware
+
+    if not _exclusive_hardware:
+        return
+
+    logging.info("Exclusive hardware mode: Cleaning up system processes")
+
+    try:
+        # Kill all memtier_benchmark processes
+        logging.info("Killing all memtier_benchmark processes")
+        subprocess.run(["pkill", "-f", "memtier_benchmark"], check=False)
+
+        # Stop all docker containers
+        logging.info("Stopping all docker containers")
+        docker_client = docker.from_env()
+        containers = docker_client.containers.list()
+        for container in containers:
+            try:
+                logging.info(
+                    f"Stopping container: {container.name} ({container.id[:12]})"
+                )
+                container.stop(timeout=10)
+                container.remove(force=True)
+            except Exception as e:
+                logging.warning(f"Error stopping container {container.id[:12]}: {e}")
+
+        # Wait a moment for cleanup to complete
+        time.sleep(2)
+        logging.info("System cleanup completed")
+
+    except Exception as e:
+        logging.error(f"Error during system cleanup: {e}")
+
 
 def print_directory_logs(directory_path, description=""):
     """Print all .log files in a directory for debugging purposes."""
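Assuming the coordinator was started with --http-auth-username/--http-auth-password (see the args.py hunk above), a client-side sketch of the two endpoints using only the standard library; host, port, and credentials are placeholders:

import base64
import json
import urllib.request

# Placeholders: point these at a coordinator started with HTTP auth enabled.
base_url = "http://localhost:8080"
credentials = base64.b64encode(b"coordinator:change-me").decode()
headers = {"Authorization": f"Basic {credentials}"}

# GET /ping - health check
req = urllib.request.Request(f"{base_url}/ping", headers=headers)
with urllib.request.urlopen(req) as resp:
    print(json.loads(resp.read()))  # {"status": "healthy", ...}

# POST /reset-queue - clear pending tests and skip the remaining ones
req = urllib.request.Request(
    f"{base_url}/reset-queue", data=b"{}", headers=headers, method="POST"
)
with urllib.request.urlopen(req) as resp:
    print(json.loads(resp.read()))  # {"status": "success", "message": "Queue reset requested", ...}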
@@ -157,6 +352,8 @@ from redis_benchmarks_specification.__self_contained_coordinator__.docker import
 
 
 def main():
+    global _exclusive_hardware, _http_auth_username, _http_auth_password
+
     _, _, project_version = populate_with_poetry_data()
     project_name = "redis-benchmarks-spec runner(self-contained)"
     parser = create_self_contained_coordinator_args(
@@ -164,6 +361,23 @@ def main():
     )
     args = parser.parse_args()
 
+    # Set global exclusive hardware flag
+    _exclusive_hardware = args.exclusive_hardware
+    if _exclusive_hardware:
+        logging.info("Exclusive hardware mode enabled")
+
+    # Set HTTP authentication credentials and start server only if credentials are provided
+    _http_auth_username = args.http_auth_username
+    _http_auth_password = args.http_auth_password
+
+    if _http_auth_username and _http_auth_password:
+        logging.info(
+            "Starting HTTP server with authentication on port {}".format(args.http_port)
+        )
+        start_http_server(args.http_port)
+    else:
+        logging.info("HTTP server disabled - no authentication credentials provided")
+
     if args.logname is not None:
         print("Writting log to {}".format(args.logname))
         logging.basicConfig(
@@ -750,7 +964,7 @@ def process_self_contained_coordinator_stream(
                         github_event_conn.expire(
                             stream_test_list_pending, REDIS_BINS_EXPIRE_SECS
                         )
-                        logging.
+                        logging.debug(
                             f"Added test named {test_name} to the pending test list in key {stream_test_list_pending}"
                         )
 
@@ -781,6 +995,22 @@ def process_self_contained_coordinator_stream(
                     )
 
                     for test_file in filtered_test_files:
+                        # Check if queue reset was requested
+                        global _reset_queue_requested
+                        if _reset_queue_requested:
+                            logging.info(
+                                "Queue reset requested. Skipping remaining tests and clearing queues."
+                            )
+                            # Clear all pending tests from the queue
+                            github_event_conn.delete(stream_test_list_pending)
+                            github_event_conn.delete(stream_test_list_running)
+                            logging.info("Cleared pending and running test queues")
+                            _reset_queue_requested = False
+                            break
+
+                        # Clean up system processes if in exclusive hardware mode
+                        cleanup_system_processes()
+
                         redis_containers = []
                         client_containers = []
                         with open(test_file, "r") as stream:
@@ -794,8 +1024,8 @@ def process_self_contained_coordinator_stream(
                             github_event_conn.expire(
                                 stream_test_list_running, REDIS_BINS_EXPIRE_SECS
                             )
-                            logging.
-                            f"Added test named {test_name} to the
+                            logging.debug(
+                                f"Added test named {test_name} to the running test list in key {stream_test_list_running}"
                             )
                             (
                                 _,
@@ -1250,6 +1480,23 @@ def process_self_contained_coordinator_stream(
                                     results_dict = post_process_vector_db(
                                         temporary_dir_client
                                     )
+
+                                    # Validate benchmark metrics for vector-db-benchmark
+                                    is_valid, validation_error = (
+                                        validate_benchmark_metrics(
+                                            results_dict,
+                                            test_name,
+                                            benchmark_config,
+                                            default_metrics,
+                                        )
+                                    )
+                                    if not is_valid:
+                                        logging.error(
+                                            f"Test {test_name} failed metric validation: {validation_error}"
+                                        )
+                                        test_result = False
+                                        failed_tests += 1
+                                        continue
                                 else:
                                     post_process_benchmark_results(
                                         benchmark_tool,
@@ -1276,6 +1523,24 @@ def process_self_contained_coordinator_stream(
                                         "r",
                                     ) as json_file:
                                         results_dict = json.load(json_file)
+
+                                        # Validate benchmark metrics
+                                        is_valid, validation_error = (
+                                            validate_benchmark_metrics(
+                                                results_dict,
+                                                test_name,
+                                                benchmark_config,
+                                                default_metrics,
+                                            )
+                                        )
+                                        if not is_valid:
+                                            logging.error(
+                                                f"Test {test_name} failed metric validation: {validation_error}"
+                                            )
+                                            test_result = False
+                                            failed_tests += 1
+                                            continue
+
                                     print_results_table_stdout(
                                         benchmark_config,
                                         default_metrics,
@@ -1430,6 +1695,9 @@ def process_self_contained_coordinator_stream(
 
                         overall_result &= test_result
 
+                        # Clean up system processes after test completion if in exclusive hardware mode
+                        cleanup_system_processes()
+
                         github_event_conn.lrem(stream_test_list_running, 1, test_name)
                         github_event_conn.lpush(stream_test_list_completed, test_name)
                         github_event_conn.expire(
@@ -1592,7 +1860,7 @@ def process_self_contained_coordinator_stream(
                                     e.__str__()
                                 )
                             )
-                            logging.
+                            logging.debug(
                                 f"Added test named {test_name} to the completed test list in key {stream_test_list_completed}"
                             )
                         else:
{redis_benchmarks_specification-0.1.328.dist-info → redis_benchmarks_specification-0.1.330.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: redis-benchmarks-specification
-Version: 0.1.328
+Version: 0.1.330
 Summary: The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute.
 Author: filipecosta90
 Author-email: filipecosta.90@gmail.com
{redis_benchmarks_specification-0.1.328.dist-info → redis_benchmarks_specification-0.1.330.dist-info}/RECORD

@@ -15,9 +15,10 @@ redis_benchmarks_specification/__common__/builder_schema.py,sha256=kfDpRIk7NkJrb
 redis_benchmarks_specification/__common__/env.py,sha256=kvJ8Ll-fvI_Tc0vynrzUEr22TqnJizzvJ4Lu9RjNr_M,3119
 redis_benchmarks_specification/__common__/github.py,sha256=9TZtnISsSgXTSAN_VQejo5YRPDPhlU0gjxgKGPw_sP8,10699
 redis_benchmarks_specification/__common__/package.py,sha256=4uVt1BAZ999LV2rZkq--Tk6otAVIf9YR3g3KGeUpiW4,834
-redis_benchmarks_specification/__common__/runner.py,sha256=
+redis_benchmarks_specification/__common__/runner.py,sha256=TKMUFJ3nLSfmSU7P_ok9oM5-pI4L4tFxsWLUWaUHhbI,16733
 redis_benchmarks_specification/__common__/spec.py,sha256=D_SN48wg6NMthW_-OS1H5bydSDiuZpfd4WPPj7Vfwmc,5760
-redis_benchmarks_specification/__common__/
+redis_benchmarks_specification/__common__/suppress_warnings.py,sha256=xpOjJ_piGYWlGq9ITr-ZwSCl2GpreA9juZIBao4fDRs,691
+redis_benchmarks_specification/__common__/timeseries.py,sha256=P0tbmH7leEMQwvqlr4lYZgr_I6EY3chh1Kf7XWe5fDQ,54048
 redis_benchmarks_specification/__compare__/__init__.py,sha256=DtBXRp0Q01XgCFmY-1OIePMyyYihVNAjZ1Y8zwqSDN0,101
 redis_benchmarks_specification/__compare__/args.py,sha256=CNtA7pI9CJDTBJPGL2pNVfis7VDdxLautwRyka7oUCI,8911
 redis_benchmarks_specification/__compare__/compare.py,sha256=_AbuV3FZxtUZIdq4qq24LNzPNIdtQQaqrk8bUjn9blk,84327
@@ -25,9 +26,9 @@ redis_benchmarks_specification/__init__.py,sha256=YQIEx2sLPPA0JR9OuCuMNMNtm-f_gq
 redis_benchmarks_specification/__runner__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
 redis_benchmarks_specification/__runner__/args.py,sha256=K3VGmBC0-9lSv9H6VDp0N-6FGMWvc_4H0pG_TOXN5u8,11312
 redis_benchmarks_specification/__runner__/remote_profiling.py,sha256=R7obNQju8mmY9oKkcndjI4aAuxi84OCLhDSqqaYu1SU,18610
-redis_benchmarks_specification/__runner__/runner.py,sha256
+redis_benchmarks_specification/__runner__/runner.py,sha256=-BDFxOLgkFe4LvVX1FnqmuszuyRMR8AJZW0SvPX0utw,155496
 redis_benchmarks_specification/__self_contained_coordinator__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
-redis_benchmarks_specification/__self_contained_coordinator__/args.py,sha256=
+redis_benchmarks_specification/__self_contained_coordinator__/args.py,sha256=1LePhRkDsoMPFclM_DoXBIoMBN8zcVoQMnm9wTK5Uqw,6961
 redis_benchmarks_specification/__self_contained_coordinator__/artifacts.py,sha256=OVHqJzDgeSSRfUSiKp1ZTAVv14PvSbk-5yJsAAoUfpw,936
 redis_benchmarks_specification/__self_contained_coordinator__/build_info.py,sha256=vlg8H8Rxu2falW8xp1GvL1SV1fyBguSbz6Apxc7A2yM,2282
 redis_benchmarks_specification/__self_contained_coordinator__/clients.py,sha256=EL1V4-i-tTav1mcF_CUosqPF3Q1qi9BZL0zFajEk70c,1878
@@ -35,8 +36,8 @@ redis_benchmarks_specification/__self_contained_coordinator__/cpuset.py,sha256=s
 redis_benchmarks_specification/__self_contained_coordinator__/docker.py,sha256=09SyAfqlzs1KG9ZAajClNWtiNk4Jqzd--4-m3n1rLjU,3156
 redis_benchmarks_specification/__self_contained_coordinator__/post_processing.py,sha256=sVLKNnWdAqYY9DjVdqRC5tDaIrVSaI3Ca7w8-DQ-LRM,776
 redis_benchmarks_specification/__self_contained_coordinator__/prepopulation.py,sha256=1UeFr2T1ZQBcHCSd4W1ZtaWgXyFPfjLyDi_DgDc1eTA,2957
-redis_benchmarks_specification/__self_contained_coordinator__/runners.py,sha256=
-redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=
+redis_benchmarks_specification/__self_contained_coordinator__/runners.py,sha256=F11zO_ILnpmiVwTeCQnP5nDHQk3kNnajPftwKsbhlXE,30209
+redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=9pwluITlVlucGhzQoWyJ8HVRpENgwJiqdaT6z300BBI,94834
 redis_benchmarks_specification/__setups__/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 redis_benchmarks_specification/__setups__/topologies.py,sha256=xQ1IJkcTji_ZjLiJd3vOxZpvbNtBLZw9cPkw5hGJKHU,481
 redis_benchmarks_specification/__spec__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
@@ -281,8 +282,8 @@ redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-st
 redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-storage-1k-sessions.yml,sha256=2egtIxPxCze2jlbAfgsk4v9JSQHNMoPLbDWFEW8olDg,7006
 redis_benchmarks_specification/test-suites/template.txt,sha256=ezqGiRPOvuSDO0iG7GEf-AGXNfHbgXI89_G0RUEzL88,481
 redis_benchmarks_specification/vector-search-test-suites/vector_db_benchmark_test.yml,sha256=PD7ow-k4Ll2BkhEC3aIqiaCZt8Hc4aJIp96Lw3J3mcI,791
-redis_benchmarks_specification-0.1.
-redis_benchmarks_specification-0.1.
-redis_benchmarks_specification-0.1.
-redis_benchmarks_specification-0.1.
-redis_benchmarks_specification-0.1.
+redis_benchmarks_specification-0.1.330.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+redis_benchmarks_specification-0.1.330.dist-info/METADATA,sha256=pvr1eZrXi04EZXbtHfLKbdYc34UFuKSP35IB70XEGKo,22768
+redis_benchmarks_specification-0.1.330.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+redis_benchmarks_specification-0.1.330.dist-info/entry_points.txt,sha256=x5WBXCZsnDRTZxV7SBGmC65L2k-ygdDOxV8vuKN00Nk,715
+redis_benchmarks_specification-0.1.330.dist-info/RECORD,,