redis-benchmarks-specification 0.1.331-py3-none-any.whl → 0.1.333-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
This version of redis-benchmarks-specification was flagged as a potentially problematic release.
- redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py +107 -6
- {redis_benchmarks_specification-0.1.331.dist-info → redis_benchmarks_specification-0.1.333.dist-info}/METADATA +1 -1
- {redis_benchmarks_specification-0.1.331.dist-info → redis_benchmarks_specification-0.1.333.dist-info}/RECORD +6 -6
- {redis_benchmarks_specification-0.1.331.dist-info → redis_benchmarks_specification-0.1.333.dist-info}/LICENSE +0 -0
- {redis_benchmarks_specification-0.1.331.dist-info → redis_benchmarks_specification-0.1.333.dist-info}/WHEEL +0 -0
- {redis_benchmarks_specification-0.1.331.dist-info → redis_benchmarks_specification-0.1.333.dist-info}/entry_points.txt +0 -0
redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py

@@ -122,6 +122,7 @@ _reset_queue_requested = False
 _exclusive_hardware = False
 _http_auth_username = None
 _http_auth_password = None
+_flush_timestamp = None
 
 
 class CoordinatorHTTPHandler(BaseHTTPRequestHandler):
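The new `_flush_timestamp` joins the other module-level flags that the HTTP handler shares with the coordinator's main loop; since the HTTP server runs in a separate thread (see `start_http_server` below), the handoff relies on simple attribute assignment being atomic in CPython. For reference, here is a minimal sketch of the same flag guarded by an explicit lock; `_state_lock`, `set_flush_timestamp`, and `get_flush_timestamp` are illustrative names, not identifiers from the package:

    import datetime
    import threading

    _state_lock = threading.Lock()  # hypothetical guard for the shared flag
    _flush_timestamp = None

    def set_flush_timestamp():
        # Called from the HTTP handler thread.
        global _flush_timestamp
        with _state_lock:
            _flush_timestamp = datetime.datetime.utcnow()

    def get_flush_timestamp():
        # Called from the coordinator's stream-processing loop.
        with _state_lock:
            return _flush_timestamp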
@@ -203,7 +204,7 @@ class CoordinatorHTTPHandler(BaseHTTPRequestHandler):
             self._send_auth_required()
             return
 
-        global _reset_queue_requested
+        global _reset_queue_requested, _flush_timestamp
 
         parsed_path = urlparse(self.path)
 
@@ -240,12 +241,86 @@ class CoordinatorHTTPHandler(BaseHTTPRequestHandler):
                 self.send_header("Content-type", "application/json")
                 self.end_headers()
                 self.wfile.write(json.dumps({"error": str(e)}).encode())
+
+        elif parsed_path.path == "/flush":
+            try:
+                # Read request body (optional)
+                content_length = int(self.headers.get("Content-Length", 0))
+                if content_length > 0:
+                    post_data = self.rfile.read(content_length)
+                    try:
+                        request_data = json.loads(post_data.decode())
+                    except json.JSONDecodeError:
+                        request_data = {}
+                else:
+                    request_data = {}
+
+                # Record flush timestamp
+                flush_time = datetime.datetime.utcnow()
+                _flush_timestamp = flush_time
+
+                logging.info("Flush requested via HTTP endpoint - stopping all containers and processes")
+
+                # Perform flush cleanup
+                self._perform_flush_cleanup()
+
+                self.send_response(200)
+                self.send_header("Content-type", "application/json")
+                self.end_headers()
+                response = {
+                    "status": "success",
+                    "message": "Flush completed - all containers stopped and processes killed",
+                    "flush_timestamp": flush_time.isoformat(),
+                    "timestamp": datetime.datetime.utcnow().isoformat()
+                }
+                self.wfile.write(json.dumps(response).encode())
+
+            except Exception as e:
+                logging.error(f"Error during flush operation: {e}")
+                self.send_response(500)
+                self.send_header("Content-type", "application/json")
+                self.end_headers()
+                response = {
+                    "status": "error",
+                    "message": f"Flush failed: {str(e)}",
+                    "timestamp": datetime.datetime.utcnow().isoformat()
+                }
+                self.wfile.write(json.dumps(response).encode())
+
         else:
             self.send_response(404)
             self.send_header("Content-type", "application/json")
             self.end_headers()
             self.wfile.write(json.dumps({"error": "Not found"}).encode())
 
+    def _perform_flush_cleanup(self):
+        """Perform flush cleanup: stop all containers and kill memtier processes"""
+        import subprocess
+
+        # Kill all memtier processes
+        try:
+            logging.info("Killing all memtier_benchmark processes")
+            subprocess.run(["pkill", "-f", "memtier_benchmark"], check=False)
+            subprocess.run(["pkill", "-f", "memtier"], check=False)
+        except Exception as e:
+            logging.warning(f"Error killing memtier processes: {e}")
+
+        # Stop all Docker containers
+        try:
+            logging.info("Stopping all Docker containers")
+            client = docker.from_env()
+            containers = client.containers.list()
+            for container in containers:
+                try:
+                    logging.info(f"Stopping container: {container.name} ({container.id[:12]})")
+                    container.stop(timeout=5)
+                except Exception as e:
+                    logging.warning(f"Error stopping container {container.name}: {e}")
+        except Exception as e:
+            logging.warning(f"Error accessing Docker client: {e}")
+
+        logging.info("Flush cleanup completed")
+
 
 def start_http_server(port=8080):
     """Start the HTTP server in a separate thread"""
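Once the coordinator is up, the new endpoint can be exercised with any HTTP client. A minimal sketch using the requests library, assuming the coordinator's HTTP server is reachable on localhost:8080 and that basic auth is configured (host, port, and credentials below are placeholders, not values from the package):

    import requests

    # Placeholder host and credentials; substitute the coordinator's actual
    # address and the configured _http_auth_username/_http_auth_password.
    resp = requests.post(
        "http://localhost:8080/flush",
        auth=("user", "secret"),
        timeout=30,
    )
    print(resp.status_code)
    print(resp.json())  # expected keys: status, message, flush_timestamp, timestamp

Note that `_perform_flush_cleanup` runs pkill with check=False, so a non-zero exit status (no matching processes) is not treated as a failure, and it stops every container returned by `client.containers.list()`, not only benchmark containers.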
@@ -259,6 +334,9 @@ def start_http_server(port=8080):
         logging.info(
             f" POST /reset-queue - Reset pending streams and skip running tests"
         )
+        logging.info(
+            f" POST /flush - Stop all containers and processes, ignore work before flush time"
+        )
         server.serve_forever()
     except Exception as e:
         logging.error(f"HTTP server error: {e}")
@@ -784,6 +862,19 @@ def process_self_contained_coordinator_stream(
             git_timestamp_ms,
             run_arch,
         ) = extract_build_info_from_streamdata(testDetails)
+
+        # Check if this work should be ignored due to flush
+        global _flush_timestamp
+        if _flush_timestamp is not None and use_git_timestamp and git_timestamp_ms is not None:
+            # Convert flush timestamp to milliseconds for comparison
+            flush_timestamp_ms = int(_flush_timestamp.timestamp() * 1000)
+            if git_timestamp_ms < flush_timestamp_ms:
+                logging.info(
+                    f"Ignoring work with git_timestamp_ms {git_timestamp_ms} "
+                    f"(before flush timestamp {flush_timestamp_ms}). Stream id: {stream_id}"
+                )
+                return stream_id, False, 0
+
         tf_github_org = default_github_org
         if b"github_org" in testDetails:
             tf_github_org = testDetails[b"github_org"].decode()
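The comparison above hinges on converting the coordinator's flush time into the same unit as git_timestamp_ms, milliseconds since the Unix epoch. A minimal sketch of that conversion (values are hypothetical); note that timestamp() interprets a naive datetime, such as the one utcnow() returns, as local time, so the sketch uses an explicitly UTC-aware datetime:

    import datetime

    # Convert a UTC wall-clock time to epoch milliseconds, the unit used
    # by git_timestamp_ms. An aware datetime sidesteps the local-time
    # interpretation that timestamp() applies to naive values.
    flush_time = datetime.datetime.now(datetime.timezone.utc)
    flush_timestamp_ms = int(flush_time.timestamp() * 1000)

    git_timestamp_ms = 1_700_000_000_000  # hypothetical commit timestamp (ms)
    if git_timestamp_ms < flush_timestamp_ms:
        print("commit predates the flush; this work item would be skipped")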
@@ -954,6 +1045,12 @@ def process_self_contained_coordinator_stream(
                     command_regexp,
                 )
 
+                logging.info(f"Adding {len(filtered_test_files)} tests to pending test list")
+
+                # Use pipeline for efficient bulk operations
+                pipeline = github_event_conn.pipeline()
+                test_names_added = []
+
                 for test_file in filtered_test_files:
                     with open(test_file, "r") as stream:
                         (

@@ -961,14 +1058,18 @@ def process_self_contained_coordinator_stream(
                             benchmark_config,
                             test_name,
                         ) = get_final_benchmark_config(None, None, stream, "")
-                        github_event_conn.lpush(stream_test_list_pending, test_name)
-                        github_event_conn.expire(
-                            stream_test_list_pending, REDIS_BINS_EXPIRE_SECS
-                        )
+                        pipeline.lpush(stream_test_list_pending, test_name)
+                        test_names_added.append(test_name)
                         logging.debug(
-                            f"
+                            f"Queued test named {test_name} for addition to pending test list"
                         )
 
+                # Set expiration and execute pipeline
+                pipeline.expire(stream_test_list_pending, REDIS_BINS_EXPIRE_SECS)
+                pipeline.execute()
+
+                logging.info(f"Successfully added {len(test_names_added)} tests to pending test list in key {stream_test_list_pending}")
+
                 pending_tests = len(filtered_test_files)
                 failed_tests = 0
                 benchmark_suite_start_datetime = datetime.datetime.utcnow()
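The change above replaces one LPUSH plus one EXPIRE round trip per test file with a single redis-py pipeline that queues every LPUSH and one trailing EXPIRE, then sends them in a single batch. A self-contained sketch of the pattern (the connection, key name, and test names are placeholders; the coordinator uses its existing github_event_conn and stream_test_list_pending):

    import redis

    conn = redis.Redis(host="localhost", port=6379)  # placeholder connection
    pending_key = "ci:pending-tests"  # stands in for stream_test_list_pending
    test_names = ["test_a", "test_b", "test_c"]

    pipe = conn.pipeline()
    for name in test_names:
        pipe.lpush(pending_key, name)
    pipe.expire(pending_key, 3600)  # stands in for REDIS_BINS_EXPIRE_SECS
    pipe.execute()  # all queued commands reach the server in one round trip

By default redis-py wraps a pipeline in MULTI/EXEC, so the pushes and the expiry also apply atomically.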
{redis_benchmarks_specification-0.1.331.dist-info → redis_benchmarks_specification-0.1.333.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: redis-benchmarks-specification
-Version: 0.1.331
+Version: 0.1.333
 Summary: The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute.
 Author: filipecosta90
 Author-email: filipecosta.90@gmail.com
{redis_benchmarks_specification-0.1.331.dist-info → redis_benchmarks_specification-0.1.333.dist-info}/RECORD

@@ -37,7 +37,7 @@ redis_benchmarks_specification/__self_contained_coordinator__/docker.py,sha256=0
 redis_benchmarks_specification/__self_contained_coordinator__/post_processing.py,sha256=sVLKNnWdAqYY9DjVdqRC5tDaIrVSaI3Ca7w8-DQ-LRM,776
 redis_benchmarks_specification/__self_contained_coordinator__/prepopulation.py,sha256=1UeFr2T1ZQBcHCSd4W1ZtaWgXyFPfjLyDi_DgDc1eTA,2957
 redis_benchmarks_specification/__self_contained_coordinator__/runners.py,sha256=F11zO_ILnpmiVwTeCQnP5nDHQk3kNnajPftwKsbhlXE,30209
-redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=
+redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=7MSys1oasTMTmOm_6EuGtoWft19L9LR2OkQLrlnB2w0,99326
 redis_benchmarks_specification/__setups__/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 redis_benchmarks_specification/__setups__/topologies.py,sha256=xQ1IJkcTji_ZjLiJd3vOxZpvbNtBLZw9cPkw5hGJKHU,481
 redis_benchmarks_specification/__spec__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83

@@ -282,8 +282,8 @@ redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-st
 redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-storage-1k-sessions.yml,sha256=2egtIxPxCze2jlbAfgsk4v9JSQHNMoPLbDWFEW8olDg,7006
 redis_benchmarks_specification/test-suites/template.txt,sha256=ezqGiRPOvuSDO0iG7GEf-AGXNfHbgXI89_G0RUEzL88,481
 redis_benchmarks_specification/vector-search-test-suites/vector_db_benchmark_test.yml,sha256=PD7ow-k4Ll2BkhEC3aIqiaCZt8Hc4aJIp96Lw3J3mcI,791
-redis_benchmarks_specification-0.1.331.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-redis_benchmarks_specification-0.1.331.dist-info/METADATA,sha256=
-redis_benchmarks_specification-0.1.331.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-redis_benchmarks_specification-0.1.331.dist-info/entry_points.txt,sha256=x5WBXCZsnDRTZxV7SBGmC65L2k-ygdDOxV8vuKN00Nk,715
-redis_benchmarks_specification-0.1.331.dist-info/RECORD,,
+redis_benchmarks_specification-0.1.333.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+redis_benchmarks_specification-0.1.333.dist-info/METADATA,sha256=r3xqLRQZADSrPCQNjXzbeBh1bEmx56jXzEo0KFgGV2c,22768
+redis_benchmarks_specification-0.1.333.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+redis_benchmarks_specification-0.1.333.dist-info/entry_points.txt,sha256=x5WBXCZsnDRTZxV7SBGmC65L2k-ygdDOxV8vuKN00Nk,715
+redis_benchmarks_specification-0.1.333.dist-info/RECORD,,