redis-benchmarks-specification 0.1.70-py3-none-any.whl → 0.1.73-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- redis_benchmarks_specification/__builder__/builder.py +2 -0
- redis_benchmarks_specification/__cli__/args.py +6 -0
- redis_benchmarks_specification/__cli__/cli.py +5 -1
- redis_benchmarks_specification/__compare__/__init__.py +5 -0
- redis_benchmarks_specification/__compare__/args.py +139 -0
- redis_benchmarks_specification/__compare__/compare.py +1153 -0
- redis_benchmarks_specification/__runner__/runner.py +120 -59
- redis_benchmarks_specification/__self_contained_coordinator__/args.py +3 -0
- redis_benchmarks_specification/__self_contained_coordinator__/build_info.py +7 -0
- redis_benchmarks_specification/__self_contained_coordinator__/runners.py +1 -0
- redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py +472 -418
- redis_benchmarks_specification/test-suites/create-re-string.py +286 -0
- redis_benchmarks_specification/test-suites/generate.py +108 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-hash-hexists.yml +1 -1
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-hash-hincrby.yml +2 -2
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-200KiB-values.yml +37 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-20KiB-values.yml +37 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-2MB-values.yml +37 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-decr.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-get-200KiB.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-get-20KiB.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-get-2MB.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zset-100-elements-zrangebyscore-all-elements-long-scores.yml +2 -2
- redis_benchmarks_specification/test-suites/my-new-test.yml +16 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-100Kkeys-load-string200c-with-20KiB-values-pipeline-10.yml +20 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-100Kkeys-load-string200c-with-20KiB-values.yml +20 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-100Kkeys-load-string50c-with-20KiB-values-pipeline-10.yml +20 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-100Kkeys-load-string50c-with-20KiB-values.yml +20 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-100Kkeys-string-setget200c-20KiB-pipeline-10.yml +26 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-100Kkeys-string-setget200c-20KiB.yml +26 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-100Kkeys-string-setget50c-20KiB-pipeline-10.yml +26 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-100Kkeys-string-setget50c-20KiB.yml +26 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-1Mkeys-load-string200c-with-100B-values-pipeline-10.yml +20 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-1Mkeys-load-string200c-with-100B-values.yml +20 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-1Mkeys-load-string200c-with-1KiB-values-pipeline-10.yml +20 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-1Mkeys-load-string200c-with-1KiB-values.yml +20 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-1Mkeys-load-string50c-with-100B-values-pipeline-10.yml +20 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-1Mkeys-load-string50c-with-100B-values.yml +20 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-1Mkeys-load-string50c-with-1KiB-values-pipeline-10.yml +20 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-1Mkeys-load-string50c-with-1KiB-values.yml +20 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-1Mkeys-string-mget-1KiB.yml +27 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-1Mkeys-string-setget200c-100B-pipeline-10.yml +26 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-1Mkeys-string-setget200c-100B.yml +26 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-1Mkeys-string-setget200c-1KiB-pipeline-10.yml +26 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-1Mkeys-string-setget200c-1KiB.yml +26 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-1Mkeys-string-setget50c-100B-pipeline-10.yml +26 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-1Mkeys-string-setget50c-100B.yml +26 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-1Mkeys-string-setget50c-1KiB-pipeline-10.yml +26 -0
- redis_benchmarks_specification/test-suites/string/memtier_benchmark-1Mkeys-string-setget50c-1KiB.yml +26 -0
- redis_benchmarks_specification/test-suites/template.txt +16 -0
- {redis_benchmarks_specification-0.1.70.dist-info → redis_benchmarks_specification-0.1.73.dist-info}/METADATA +3 -4
- {redis_benchmarks_specification-0.1.70.dist-info → redis_benchmarks_specification-0.1.73.dist-info}/RECORD +55 -16
- {redis_benchmarks_specification-0.1.70.dist-info → redis_benchmarks_specification-0.1.73.dist-info}/WHEEL +1 -1
- {redis_benchmarks_specification-0.1.70.dist-info → redis_benchmarks_specification-0.1.73.dist-info}/entry_points.txt +1 -0
- {redis_benchmarks_specification-0.1.70.dist-info → redis_benchmarks_specification-0.1.73.dist-info}/LICENSE +0 -0
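
Most of the additions above are new memtier_benchmark test-suite YAML specs. The coordinator code in this release reads a handful of top-level keys from each spec ("dbconfig", "clientconfig", "redis-topologies", and an optional "preload_tool" under dbconfig), so a quick sanity check of one of the new files can be done with a small loader along these lines. This is a hedged sketch, not part of the package; it assumes PyYAML is installed and only touches keys the coordinator visibly uses.

```python
# Hypothetical helper, not shipped with the package: it only assumes the spec
# keys that the coordinator code shown further down actually reads.
import yaml


def summarize_test_suite(path):
    # Load one of the test-suite YAML files listed above.
    with open(path, "r") as stream:
        spec = yaml.safe_load(stream)
    dbconfig = spec.get("dbconfig", {}) or {}
    return {
        "name": spec.get("name", path),
        "topologies": spec.get("redis-topologies", []),
        "has_preload_tool": "preload_tool" in dbconfig,
        "has_clientconfig": "clientconfig" in spec,
    }


if __name__ == "__main__":
    print(
        summarize_test_suite(
            "redis_benchmarks_specification/test-suites/"
            "memtier_benchmark-1Mkeys-string-get-2MB.yml"
        )
    )
```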
redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py

@@ -214,6 +214,10 @@ def main():
     consumer_pos = args.consumer_pos
     logging.info("Consumer pos {}".format(consumer_pos))

+    # Arch
+    arch = args.arch
+    logging.info("Running for arch: {}".format(arch))
+
     # Docker air gap usage
     docker_air_gap = args.docker_air_gap
     if docker_air_gap:
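
The `arch = args.arch` read above implies a matching command-line flag in the coordinator's argument parser (the args.py change itself is not expanded on this page). A minimal sketch of what such a flag could look like follows; the flag name and help text are assumptions, and only the "amd64" default mirrors the coordinator code shown below.

```python
# Sketch only: the real definition lives in the args.py changes that this page
# does not expand. Flag name and help text are assumptions.
import argparse

parser = argparse.ArgumentParser(description="self-contained coordinator (sketch)")
parser.add_argument(
    "--arch",
    type=str,
    default="amd64",
    help="architecture this coordinator consumes work for (e.g. amd64, arm64)",
)
args = parser.parse_args(["--arch", "arm64"])
print("Running for arch: {}".format(args.arch))
```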
@@ -264,6 +268,7 @@ def main():
         docker_air_gap,
         override_memtier_test_time,
         default_metrics,
+        arch,
     )


@@ -286,6 +291,7 @@ def self_contained_coordinator_blocking_read(
     docker_air_gap=False,
     override_test_time=None,
     default_metrics=None,
+    arch="amd64",
 ):
     num_process_streams = 0
     num_process_test_suites = 0
@@ -331,6 +337,7 @@ def self_contained_coordinator_blocking_read(
                     "defaults.yml",
                     None,
                     default_metrics,
+                    arch,
                 )
                 num_process_streams = num_process_streams + 1
                 num_process_test_suites = num_process_test_suites + total_test_suite_runs
@@ -402,6 +409,7 @@ def process_self_contained_coordinator_stream(
     defaults_filename="defaults.yml",
     override_test_time=None,
     default_metrics=[],
+    arch="amd64",
 ):
     stream_id = "n/a"
     overall_result = False
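
Taken together, the four hunks above thread the new arch value from main() through self_contained_coordinator_blocking_read() into process_self_contained_coordinator_stream(), keeping "amd64" as the default so existing callers keep working. A minimal plumbing sketch with stub signatures (not the real implementations):

```python
# Stubs that mirror only the parameter threading added in this release.
def process_self_contained_coordinator_stream(default_metrics=None, arch="amd64"):
    return arch


def self_contained_coordinator_blocking_read(default_metrics=None, arch="amd64"):
    return process_self_contained_coordinator_stream(default_metrics, arch)


def main(cli_arch):
    arch = cli_arch  # in the real code this comes from args.arch
    return self_contained_coordinator_blocking_read(None, arch)


assert main("arm64") == "arm64"
assert self_contained_coordinator_blocking_read(None) == "amd64"
```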
@@ -422,499 +430,545 @@ def process_self_contained_coordinator_stream(
         run_image,
         use_git_timestamp,
         git_timestamp_ms,
+        run_arch,
     ) = extract_build_info_from_streamdata(testDetails)

+    skip_test = False
+    if b"platform" in testDetails:
+        platform = testDetails[b"platform"]
+        if running_platform != platform:
+            skip_test = True
+            logging.info(
+                "skipping stream_id {} given plaform {}!={}".format(
+                    stream_id, running_platform, platform
+                )
+            )
+
+    if run_arch != arch:
+        skip_test = True
         logging.info(
-            "
-        airgap_docker_image_bin = conn.get(airgap_key)
-        images_loaded = docker_client.images.load(airgap_docker_image_bin)
-        logging.info("Successfully loaded images {}".format(images_loaded))
-    for test_file in testsuite_spec_files:
-        if defaults_filename in test_file:
-            continue
-        redis_containers = []
-        client_containers = []
-        with open(test_file, "r") as stream:
-            result, benchmark_config, test_name = get_final_benchmark_config(
-                None, stream, ""
+            "skipping stream_id {} given arch {}!={}".format(
+                stream_id, run_arch, arch
             )
+        )
+
+    if skip_test is False:
+        overall_result = True
+        profiler_dashboard_links = []
+        if docker_air_gap:
+            airgap_key = "docker:air-gap:{}".format(run_image)
+            logging.info(
+                "Restoring docker image: {} from {}".format(
+                    run_image, airgap_key
                 )
+            )
+            airgap_docker_image_bin = conn.get(airgap_key)
+            images_loaded = docker_client.images.load(airgap_docker_image_bin)
+            logging.info("Successfully loaded images {}".format(images_loaded))
+
+        for test_file in testsuite_spec_files:
+            if defaults_filename in test_file:
                 continue
-            if build_variant_name not in build_variants:
+            redis_containers = []
+            client_containers = []
+
+            with open(test_file, "r") as stream:
+                (
+                    result,
+                    benchmark_config,
+                    test_name,
+                ) = get_final_benchmark_config(None, stream, "")
+                if result is False:
                     logging.error(
-                        "Skipping {} given
+                        "Skipping {} given there were errors while calling get_final_benchmark_config()".format(
+                            test_file
                        )
                    )
                    continue
+                (
+                    _,
+                    _,
+                    redis_configuration_parameters,
+                    _,
+                    _,
+                ) = extract_redis_dbconfig_parameters(
+                    benchmark_config, "dbconfig"
+                )
+                build_variants = extract_build_variant_variations(
+                    benchmark_config
+                )
+                if build_variants is not None:
+                    logging.info("Detected build variant filter")
+                    if build_variant_name not in build_variants:
+                        logging.info(
+                            "Skipping {} given it's not part of build-variants for this test-suite {}".format(
+                                build_variant_name, build_variants
+                            )
                        )
-                    ceil_db_cpu_limit = extract_db_cpu_limit(
-                        topologies_map, topology_spec_name
-                    )
-                    temporary_dir = tempfile.mkdtemp(dir=home)
-                    temporary_dir_client = tempfile.mkdtemp(dir=home)
-                    logging.info(
-                        "Using local temporary dir to persist redis build artifacts. Path: {}".format(
-                            temporary_dir
+                        continue
+                    else:
+                        logging.info(
+                            "Running build variant {} given it's present on the build-variants spec {}".format(
+                                build_variant_name, build_variants
+                            )
                        )
+                for topology_spec_name in benchmark_config["redis-topologies"]:
+                    test_result = False
+                    redis_container = None
+                    try:
+                        current_cpu_pos = cpuset_start_pos
+                        ceil_db_cpu_limit = extract_db_cpu_limit(
+                            topologies_map, topology_spec_name
                        )
-                    tf_triggering_env = "ci"
-                    github_actor = "{}-{}".format(
-                        tf_triggering_env, running_platform
-                    )
-                    dso = "redis-server"
-                    profilers_artifacts_matrix = []
-                    collection_summary_str = ""
-                    if profilers_enabled:
-                        collection_summary_str = (
-                            local_profilers_platform_checks(
-                                dso,
-                                github_actor,
-                                git_branch,
-                                tf_github_repo,
-                                git_hash,
+                        temporary_dir = tempfile.mkdtemp(dir=home)
+                        temporary_dir_client = tempfile.mkdtemp(dir=home)
+                        logging.info(
+                            "Using local temporary dir to persist redis build artifacts. Path: {}".format(
+                                temporary_dir
                            )
                        )
                        logging.info(
-                        "Using
+                            "Using local temporary dir to persist client output files. Path: {}".format(
+                                temporary_dir_client
                            )
                        )
+                        tf_github_org = "redis"
+                        tf_github_repo = "redis"
+                        setup_name = "oss-standalone"
+                        setup_type = "oss-standalone"
+                        tf_triggering_env = "ci"
+                        github_actor = "{}-{}".format(
+                            tf_triggering_env, running_platform
+                        )
+                        dso = "redis-server"
+                        profilers_artifacts_matrix = []

+                        collection_summary_str = ""
+                        if profilers_enabled:
+                            collection_summary_str = (
+                                local_profilers_platform_checks(
+                                    dso,
+                                    github_actor,
+                                    git_branch,
+                                    tf_github_repo,
+                                    git_hash,
+                                )
+                            )
+                            logging.info(
+                                "Using the following collection summary string for profiler description: {}".format(
+                                    collection_summary_str
+                                )
+                            )
+
+                        restore_build_artifacts_from_test_details(
+                            build_artifacts, conn, temporary_dir, testDetails
                        )
+                        mnt_point = "/mnt/redis/"
+                        command = generate_standalone_redis_server_args(
+                            "{}redis-server".format(mnt_point),
+                            redis_proc_start_port,
+                            mnt_point,
+                            redis_configuration_parameters,
+                        )
+                        command_str = " ".join(command)
+                        db_cpuset_cpus, current_cpu_pos = generate_cpuset_cpus(
+                            ceil_db_cpu_limit, current_cpu_pos
+                        )
+                        logging.info(
+                            "Running redis-server on docker image {} (cpuset={}) with the following args: {}".format(
+                                run_image, db_cpuset_cpus, command_str
+                            )
+                        )
+                        redis_container = docker_client.containers.run(
+                            image=run_image,
+                            volumes={
+                                temporary_dir: {
+                                    "bind": mnt_point,
+                                    "mode": "rw",
+                                },
+                            },
+                            auto_remove=True,
+                            privileged=True,
+                            working_dir=mnt_point,
+                            command=command_str,
+                            network_mode="host",
+                            detach=True,
+                            cpuset_cpus=db_cpuset_cpus,
+                            pid_mode="host",
+                        )
+                        redis_containers.append(redis_container)

+                        r = redis.StrictRedis(port=redis_proc_start_port)
+                        r.ping()
+                        redis_conns = [r]
+                        reset_commandstats(redis_conns)
+                        redis_pids = []
+                        first_redis_pid = r.info()["process_id"]
+                        redis_pids.append(first_redis_pid)
+                        ceil_client_cpu_limit = extract_client_cpu_limit(
+                            benchmark_config
+                        )
+                        (
                            client_cpuset_cpus,
-                            temporary_dir,
-                            test_name,
+                            current_cpu_pos,
+                        ) = generate_cpuset_cpus(
+                            ceil_client_cpu_limit, current_cpu_pos
                        )
+                        client_mnt_point = "/mnt/client/"
+                        benchmark_tool_workdir = client_mnt_point

+                        if "preload_tool" in benchmark_config["dbconfig"]:
+                            data_prepopulation_step(
+                                benchmark_config,
+                                benchmark_tool_workdir,
+                                client_cpuset_cpus,
+                                docker_client,
+                                git_hash,
+                                redis_proc_start_port,
+                                temporary_dir,
+                                test_name,
+                            )

-                    benchmark_tool = "redis-benchmark"
-                    full_benchmark_path = "/usr/local/bin/{}".format(
-                        benchmark_tool
-                    )
+                        execute_init_commands(
+                            benchmark_config, r, dbconfig_keyname="dbconfig"
+                        )

+                        benchmark_tool = extract_client_tool(benchmark_config)
+                        # backwards compatible
+                        if benchmark_tool is None:
+                            benchmark_tool = "redis-benchmark"
+                        full_benchmark_path = "/usr/local/bin/{}".format(
+                            benchmark_tool
+                        )
+
+                        # setup the benchmark
+                        (
+                            start_time,
+                            start_time_ms,
                            start_time_str,
+                        ) = get_start_time_vars()
+                        local_benchmark_output_filename = (
+                            get_local_run_full_filename(
+                                start_time_str,
+                                git_hash,
+                                test_name,
+                                "oss-standalone",
+                            )
                        )
+                        logging.info(
+                            "Will store benchmark json output to local file {}".format(
+                                local_benchmark_output_filename
+                            )
                        )
+                        if "memtier_benchmark" not in benchmark_tool:
+                            # prepare the benchmark command
+                            (
+                                benchmark_command,
+                                benchmark_command_str,
+                            ) = prepare_benchmark_parameters(
+                                benchmark_config,
+                                full_benchmark_path,
+                                redis_proc_start_port,
+                                "localhost",
+                                local_benchmark_output_filename,
+                                False,
+                                benchmark_tool_workdir,
+                                False,
+                            )
+                        else:
+                            (
+                                _,
+                                benchmark_command_str,
+                            ) = prepare_memtier_benchmark_parameters(
+                                benchmark_config["clientconfig"],
+                                full_benchmark_path,
+                                redis_proc_start_port,
+                                "localhost",
+                                local_benchmark_output_filename,
+                                benchmark_tool_workdir,
+                            )
+
+                        client_container_image = extract_client_container_image(
+                            benchmark_config
                        )
+                        profiler_call_graph_mode = "dwarf"
+                        profiler_frequency = 99
+                        # start the profile
                        (
-                    ) =
+                            profiler_name,
+                            profilers_map,
+                        ) = profilers_start_if_required(
+                            profilers_enabled,
+                            profilers_list,
+                            redis_pids,
+                            setup_name,
+                            start_time_str,
+                            test_name,
+                            profiler_frequency,
+                            profiler_call_graph_mode,
                        )

-                    (
-                        profiler_name,
-                        profilers_map,
-                    ) = profilers_start_if_required(
-                        profilers_enabled,
-                        profilers_list,
-                        redis_pids,
-                        setup_name,
-                        start_time_str,
-                        test_name,
-                        profiler_frequency,
-                        profiler_call_graph_mode,
-                    )
-                    logging.info(
-                        "Using docker image {} as benchmark client image (cpuset={}) with the following args: {}".format(
-                            client_container_image,
-                            client_cpuset_cpus,
-                            benchmark_command_str,
+                        logging.info(
+                            "Using docker image {} as benchmark client image (cpuset={}) with the following args: {}".format(
+                                client_container_image,
+                                client_cpuset_cpus,
+                                benchmark_command_str,
+                            )
                        )
+                        # run the benchmark
+                        benchmark_start_time = datetime.datetime.now()
+
+                        client_container_stdout = docker_client.containers.run(
+                            image=client_container_image,
+                            volumes={
+                                temporary_dir_client: {
+                                    "bind": client_mnt_point,
+                                    "mode": "rw",
+                                },
                            },
-                    )
+                            auto_remove=True,
+                            privileged=True,
+                            working_dir=benchmark_tool_workdir,
+                            command=benchmark_command_str,
+                            network_mode="host",
+                            detach=False,
+                            cpuset_cpus=client_cpuset_cpus,
+                        )

+                        benchmark_end_time = datetime.datetime.now()
+                        benchmark_duration_seconds = (
+                            calculate_client_tool_duration_and_check(
+                                benchmark_end_time, benchmark_start_time
+                            )
                        )
-                        profilers_enabled,
-                        profilers_map,
-                        redis_pids,
-                        S3_BUCKET_NAME,
-                        test_name,
-                    )
-                    if (
-                        profilers_enabled
-                        and datasink_push_results_redistimeseries
-                    ):
-                        datasink_profile_tabular_data(
-                            git_branch,
+                        logging.info(
+                            "output {}".format(client_container_stdout)
+                        )
+
+                        (
+                            _,
+                            overall_tabular_data_map,
+                        ) = profilers_stop_if_required(
+                            datasink_push_results_redistimeseries,
+                            benchmark_duration_seconds,
+                            collection_summary_str,
+                            dso,
                            tf_github_org,
                            tf_github_repo,
+                            profiler_name,
+                            profilers_artifacts_matrix,
+                            profilers_enabled,
+                            profilers_map,
+                            redis_pids,
+                            S3_BUCKET_NAME,
                            test_name,
-                            tf_triggering_env,
                        )
-                    if
-                            }
-                        )
-                        https_link = generate_artifacts_table_grafana_redis(
-                            datasink_push_results_redistimeseries,
-                            grafana_profile_dashboard,
-                            profilers_artifacts,
-                            datasink_conn,
+                        if (
+                            profilers_enabled
+                            and datasink_push_results_redistimeseries
+                        ):
+                            datasink_profile_tabular_data(
+                                git_branch,
+                                tf_github_org,
+                                tf_github_repo,
+                                git_hash,
+                                overall_tabular_data_map,
+                                conn,
                                setup_name,
                                start_time_ms,
                                start_time_str,
                                test_name,
-                                tf_github_repo,
-                                git_hash,
-                                git_branch,
-                            )
-                            profiler_dashboard_links.append(
-                                [
-                                    setup_name,
-                                    test_name,
-                                    " {} ".format(https_link),
-                                ]
+                                tf_triggering_env,
                            )
+                            if len(profilers_artifacts_matrix) == 0:
+                                logging.error(
+                                    "No profiler artifact was retrieved"
+                                )
+                            else:
+                                profilers_artifacts = []
+                                for line in profilers_artifacts_matrix:
+                                    artifact_name = line[2]
+                                    s3_link = line[4]
+                                    profilers_artifacts.append(
+                                        {
+                                            "artifact_name": artifact_name,
+                                            "s3_link": s3_link,
+                                        }
+                                    )
+                                https_link = (
+                                    generate_artifacts_table_grafana_redis(
+                                        datasink_push_results_redistimeseries,
+                                        grafana_profile_dashboard,
+                                        profilers_artifacts,
+                                        datasink_conn,
+                                        setup_name,
+                                        start_time_ms,
+                                        start_time_str,
+                                        test_name,
+                                        tf_github_org,
+                                        tf_github_repo,
+                                        git_hash,
+                                        git_branch,
+                                    )
+                                )
+                                profiler_dashboard_links.append(
+                                    [
+                                        setup_name,
+                                        test_name,
+                                        " {} ".format(https_link),
+                                    ]
+                                )
+                                logging.info(
+                                    "Published new profile info for this testcase. Access it via: {}".format(
+                                        https_link
+                                    )
                                )
-                        )

+                                # Delete all the perf artifacts, now that they are uploaded to S3.
+                                # The .script and .script.mainthread files are not part of the artifacts_matrix and thus have to be deleted separately
+                                line = profilers_artifacts_matrix[0]
+                                logging.info(
+                                    "Deleting perf file {}".format(
+                                        line[3].split(".")[0]
+                                        + ".out.script.mainthread"
+                                    )
+                                )
+                                os.remove(
                                    line[3].split(".")[0]
                                    + ".out.script.mainthread"
                                )
-                            )
-                            os.remove(
-                                line[3].split(".")[0] + ".out.script.mainthread"
-                            )
-                            logging.info(
-                                "Deleteing perf file {}".format(
-                                    line[3].split(".")[0] + ".out.script"
-                                )
-                            )
-                            os.remove(line[3].split(".")[0] + ".out.script")
-                            for line in profilers_artifacts_matrix:
                                logging.info(
-                                    "
+                                    "Deleteing perf file {}".format(
+                                        line[3].split(".")[0] + ".out.script"
+                                    )
                                )
-                                os.remove(line[3])
-                    datapoint_time_ms =
-                    )
-                    full_result_path = local_benchmark_output_filename
-                    if "memtier_benchmark" in benchmark_tool:
-                        full_result_path = "{}/{}".format(
-                            temporary_dir_client,
+                                os.remove(line[3].split(".")[0] + ".out.script")
+                                for line in profilers_artifacts_matrix:
+                                    logging.info(
+                                        "Deleting perf file {}".format(line[3])
+                                    )
+                                    os.remove(line[3])
+
+                        datapoint_time_ms = start_time_ms
+                        if (
+                            use_git_timestamp is True
+                            and git_timestamp_ms is not None
+                        ):
+                            datapoint_time_ms = git_timestamp_ms
+                        post_process_benchmark_results(
+                            benchmark_tool,
                            local_benchmark_output_filename,
+                            datapoint_time_ms,
+                            start_time_str,
+                            client_container_stdout,
+                            None,
+                        )
+                        full_result_path = local_benchmark_output_filename
+                        if "memtier_benchmark" in benchmark_tool:
+                            full_result_path = "{}/{}".format(
+                                temporary_dir_client,
+                                local_benchmark_output_filename,
+                            )
+                        logging.info(
+                            "Reading results json from {}".format(
+                                full_result_path
+                            )
                        )
-                    logging.critical(
-                        "Reading results json from {}".format(full_result_path)
-                    )

+                        with open(
+                            full_result_path,
+                            "r",
+                        ) as json_file:
+                            results_dict = json.load(json_file)
+                            print_results_table_stdout(
+                                benchmark_config,
+                                default_metrics,
+                                results_dict,
+                                setup_type,
+                                test_name,
+                                None,
+                            )
+
+                            dataset_load_duration_seconds = 0
+
+                            exporter_datasink_common(
                                benchmark_config,
+                                benchmark_duration_seconds,
+                                build_variant_name,
+                                datapoint_time_ms,
+                                dataset_load_duration_seconds,
+                                datasink_conn,
+                                datasink_push_results_redistimeseries,
+                                git_branch,
+                                git_version,
+                                metadata,
+                                redis_conns,
                                results_dict,
+                                running_platform,
+                                setup_name,
                                setup_type,
                                test_name,
+                                tf_github_org,
+                                tf_github_repo,
+                                tf_triggering_env,
+                                topology_spec_name,
+                                default_metrics,
                            )
+                        r.shutdown(save=False)
+                        test_result = True
+                        total_test_suite_runs = total_test_suite_runs + 1

-                            benchmark_duration_seconds,
-                            build_variant_name,
-                            datapoint_time_ms,
-                            dataset_load_duration_seconds,
-                            datasink_conn,
-                            datasink_push_results_redistimeseries,
-                            git_branch,
-                            git_version,
-                            metadata,
-                            redis_conns,
-                            results_dict,
-                            running_platform,
-                            setup_name,
-                            setup_type,
-                            test_name,
-                            tf_github_org,
-                            tf_github_repo,
-                            tf_triggering_env,
-                            topology_spec_name,
-                            default_metrics,
-                        )
-                        r.shutdown(save=False)
-                        test_result = True
-                        total_test_suite_runs = total_test_suite_runs + 1
-                except:
-                    logging.critical(
-                        "Some unexpected exception was caught "
-                        "during local work. Failing test...."
-                    )
-                    logging.critical(sys.exc_info()[0])
-                    print("-" * 60)
-                    traceback.print_exc(file=sys.stdout)
-                    print("-" * 60)
-                    if redis_container is not None:
-                        logging.critical("Printing redis container log....")
-                        print("-" * 60)
-                        print(
-                            redis_container.logs(
-                                stdout=True, stderr=True, logs=True
-                            )
+                    except:
+                        logging.critical(
+                            "Some unexpected exception was caught "
+                            "during local work. Failing test...."
                        )
+                        logging.critical(sys.exc_info()[0])
                        print("-" * 60)
-                        logging.info(
-                            "When trying to stop DB container with id {} and image {} it was already stopped".format(
-                                redis_container.id, redis_container.image
+                        traceback.print_exc(file=sys.stdout)
+                        print("-" * 60)
+                        if redis_container is not None:
+                            logging.critical("Printing redis container log....")
+                            print("-" * 60)
+                            print(
+                                redis_container.logs(stdout=True, stderr=True)
                            )
+                            print("-" * 60)
+                        test_result = False
+                    # tear-down
+                    logging.info("Tearing down setup")
+                    for redis_container in redis_containers:
                        try:
                            redis_container.stop()
                        except docker.errors.NotFound:
                            logging.info(
-                                "When trying to stop
+                                "When trying to stop DB container with id {} and image {} it was already stopped".format(
                                    redis_container.id, redis_container.image
                                )
                            )
                            pass
+
+                    for redis_container in client_containers:
+                        if type(redis_container) == Container:
+                            try:
+                                redis_container.stop()
+                            except docker.errors.NotFound:
+                                logging.info(
+                                    "When trying to stop Client container with id {} and image {} it was already stopped".format(
+                                        redis_container.id,
+                                        redis_container.image,
+                                    )
+                                )
+                                pass
+                    logging.info(
+                        "Removing temporary dirs {} and {}".format(
+                            temporary_dir, temporary_dir_client
+                        )
                    )
-                shutil.rmtree(temporary_dir_client, ignore_errors=True)
+                    shutil.rmtree(temporary_dir, ignore_errors=True)
+                    shutil.rmtree(temporary_dir_client, ignore_errors=True)

+                    overall_result &= test_result

 else:
     logging.error("Missing commit information within received message.")