redisbench-admin 0.10.20__py3-none-any.whl → 0.10.22__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- redisbench_admin/compare/compare.py +1 -1
- redisbench_admin/run/args.py +8 -1
- redisbench_admin/run_local/args.py +9 -0
- redisbench_admin/run_local/local_db.py +101 -70
- redisbench_admin/run_local/run_local.py +54 -7
- redisbench_admin/run_remote/args.py +0 -7
- redisbench_admin/utils/remote.py +2 -2
- {redisbench_admin-0.10.20.dist-info → redisbench_admin-0.10.22.dist-info}/METADATA +1 -1
- {redisbench_admin-0.10.20.dist-info → redisbench_admin-0.10.22.dist-info}/RECORD +12 -12
- {redisbench_admin-0.10.20.dist-info → redisbench_admin-0.10.22.dist-info}/LICENSE +0 -0
- {redisbench_admin-0.10.20.dist-info → redisbench_admin-0.10.22.dist-info}/WHEEL +0 -0
- {redisbench_admin-0.10.20.dist-info → redisbench_admin-0.10.22.dist-info}/entry_points.txt +0 -0

redisbench_admin/compare/compare.py CHANGED

@@ -722,7 +722,7 @@ def from_rts_to_regression_table(
     total_comparison_points = 0
     noise_waterline = 3
     progress = tqdm(unit="benchmark time-series", total=len(test_names))
-    at_comparison=0
+    at_comparison = 0
     for test_name in test_names:
         multi_value_baseline = check_multi_value_filter(baseline_str)
         multi_value_comparison = check_multi_value_filter(comparison_str)
redisbench_admin/run/args.py CHANGED

@@ -40,9 +40,10 @@ COMMANDSTATS_ENABLED = bool(int(os.getenv("COMMANDSTATS_ENABLED", 1)))
 PROFILERS = os.getenv("PROFILERS", PROFILERS_DEFAULT)
 MAX_PROFILERS_PER_TYPE = int(os.getenv("MAX_PROFILERS", 1))
 PROFILE_FREQ = os.getenv("PROFILE_FREQ", PROFILE_FREQ_DEFAULT)
-KEEP_ENV = bool(os.getenv("KEEP_ENV",
+KEEP_ENV = bool(int(os.getenv("KEEP_ENV", "0")))
 ALLOWED_TOOLS_DEFAULT = "memtier_benchmark,redis-benchmark,redisgraph-benchmark-go,ycsb,go-ycsb,tsbs_run_queries_redistimeseries,tsbs_load_redistimeseries,ftsb_redisearch,aibench_run_inference_redisai_vision,ann-benchmarks"
 ALLOWED_BENCH_TOOLS = os.getenv("ALLOWED_BENCH_TOOLS", ALLOWED_TOOLS_DEFAULT)
+SKIP_DB_SETUP = bool(int(os.getenv("SKIP_DB_SETUP", "0")))


 def common_run_args(parser):

@@ -53,6 +54,12 @@ def common_run_args(parser):
         action="store_true",
         help="Keep environment and topology up after benchmark.",
     )
+    parser.add_argument(
+        "--skip-db-setup",
+        type=bool,
+        default=SKIP_DB_SETUP,
+        help="skip db setup/teardown steps. Usefull when you want to target an existing DB",
+    )
     parser.add_argument(
         "--fail_fast",
         required=False,
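
Note on the KEEP_ENV and SKIP_DB_SETUP defaults above: the bool(int(os.getenv(...))) wrapping is what makes the "0"/"1" environment-variable convention behave as expected, since bool() applied directly to a non-empty string such as "0" is True. A minimal sketch of the difference (plain Python, variable name illustrative):

    import os

    os.environ["KEEP_ENV"] = "0"
    # bool() of a non-empty string is always True, so this would wrongly enable the flag
    print(bool(os.environ["KEEP_ENV"]))       # True
    # converting to int first yields the intended truth value
    print(bool(int(os.environ["KEEP_ENV"])))  # False
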
redisbench_admin/run_local/args.py CHANGED

@@ -3,13 +3,22 @@
 # Copyright (c) 2021., Redis Labs Modules
 # All rights reserved.
 #
+import os

 from redisbench_admin.run.args import common_run_args
 from redisbench_admin.run.common import REDIS_BINARY

+FLUSHALL_AT_START = bool(int(os.getenv("FLUSHALL_AT_START", "0")))
+

 def create_run_local_arguments(parser):
     parser = common_run_args(parser)
     parser.add_argument("--port", type=int, default=6379)
     parser.add_argument("--redis-binary", type=str, default=REDIS_BINARY)
+    parser.add_argument(
+        "--flushall_on_every_test_start",
+        type=bool,
+        default=FLUSHALL_AT_START,
+        help="At the start of every test send a FLUSHALL",
+    )
     return parser
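
The new run-local option follows the same pattern as the rest of the tool: an environment variable supplies the default and a command-line flag can override it. A minimal, self-contained sketch of that wiring (the flag definition mirrors the diff above; the bare ArgumentParser stands in for the real parser assembly):

    import argparse
    import os

    FLUSHALL_AT_START = bool(int(os.getenv("FLUSHALL_AT_START", "0")))

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--flushall_on_every_test_start",
        type=bool,
        default=FLUSHALL_AT_START,
        help="At the start of every test send a FLUSHALL",
    )

    # with nothing on the command line, the environment-derived default wins
    print(parser.parse_args([]).flushall_on_every_test_start)
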
redisbench_admin/run_local/local_db.py CHANGED

@@ -46,59 +46,15 @@ def local_db_spin(
     required_modules,
     setup_type,
     shard_count,
+    flushall_on_every_test_start=False,
 ):
-
-
+    redis_conns = []
+    artifact_version = "n/a"
+    result = True
     temporary_dir = tempfile.mkdtemp()
-    redis_7 = args.redis_7
-    logging.info(
-        "Using local temporary dir to spin up Redis Instance. Path: {}".format(
-            temporary_dir
-        )
-    )
-    if dbdir_folder is not None:
-        from distutils.dir_util import copy_tree
-
-        copy_tree(dbdir_folder, temporary_dir)
-        logging.info(
-            "Copied entire content of {} into temporary path: {}".format(
-                dbdir_folder, temporary_dir
-            )
-        )
-    (
-        _,
-        _,
-        redis_configuration_parameters,
-        dataset_load_timeout_secs,
-        modules_configuration_parameters_map,
-    ) = extract_redis_dbconfig_parameters(benchmark_config, "dbconfig")
     cluster_api_enabled = False
-    logging.info(
-        "Using a dataset load timeout of {} seconds.".format(dataset_load_timeout_secs)
-    )
-    redis_conns = []
     if setup_type == "oss-cluster":
         cluster_api_enabled = True
-        shard_host = "127.0.0.1"
-        redis_processes, redis_conns = spin_up_local_redis_cluster(
-            binary,
-            temporary_dir,
-            shard_count,
-            shard_host,
-            args.port,
-            local_module_file,
-            redis_configuration_parameters,
-            dataset_load_timeout_secs,
-            modules_configuration_parameters_map,
-            redis_7,
-        )
-
-        status = setup_redis_cluster_from_conns(
-            redis_conns, shard_count, shard_host, args.port
-        )
-        if status is False:
-            raise Exception("Redis cluster setup failed. Failing test.")
-
     dataset, dataset_name, _, _ = check_dataset_local_requirements(
         benchmark_config,
         temporary_dir,

@@ -108,35 +64,108 @@ def local_db_spin(
         shard_count,
         cluster_api_enabled,
     )
-    if setup_type == "oss-standalone":
-        redis_processes = spin_up_local_redis(
-            binary,
-            args.port,
-            temporary_dir,
-            local_module_file,
+
+    if args.skip_db_setup:
+        logging.info("Skipping DB Setup...")
+        if dataset is not None:
+            logging.info("Given this benchmark requires an RDB load will skip it...")
+            result = False
+            return (
+                result,
+                artifact_version,
+                cluster_api_enabled,
+                redis_conns,
+                redis_processes,
+            )
+    else:
+        # setup Redis
+        # copy the rdb to DB machine
+        redis_7 = args.redis_7
+        logging.info(
+            "Using local temporary dir to spin up Redis Instance. Path: {}".format(
+                temporary_dir
+            )
+        )
+        if dbdir_folder is not None:
+            from distutils.dir_util import copy_tree
+
+            copy_tree(dbdir_folder, temporary_dir)
+            logging.info(
+                "Copied entire content of {} into temporary path: {}".format(
+                    dbdir_folder, temporary_dir
+                )
+            )
+        (
+            _,
+            _,
             redis_configuration_parameters,
-            dbdir_folder,
             dataset_load_timeout_secs,
             modules_configuration_parameters_map,
-            redis_7,
+        ) = extract_redis_dbconfig_parameters(benchmark_config, "dbconfig")
+
+        logging.info(
+            "Using a dataset load timeout of {} seconds.".format(
+                dataset_load_timeout_secs
+            )
         )

+        if setup_type == "oss-cluster":
+            cluster_api_enabled = True
+            shard_host = "127.0.0.1"
+            redis_processes, redis_conns = spin_up_local_redis_cluster(
+                binary,
+                temporary_dir,
+                shard_count,
+                shard_host,
+                args.port,
+                local_module_file,
+                redis_configuration_parameters,
+                dataset_load_timeout_secs,
+                modules_configuration_parameters_map,
+                redis_7,
+            )
+
+            status = setup_redis_cluster_from_conns(
+                redis_conns, shard_count, shard_host, args.port
+            )
+            if status is False:
+                raise Exception("Redis cluster setup failed. Failing test.")
+
+        if setup_type == "oss-standalone":
+            redis_processes = spin_up_local_redis(
+                binary,
+                args.port,
+                temporary_dir,
+                local_module_file,
+                redis_configuration_parameters,
+                dbdir_folder,
+                dataset_load_timeout_secs,
+                modules_configuration_parameters_map,
+                redis_7,
+            )
+        if setup_type == "oss-cluster":
+            for shardn, redis_process in enumerate(redis_processes):
+                logging.info(
+                    "Checking if shard #{} process with pid={} is alive".format(
+                        shardn + 1, redis_process.pid
+                    )
+                )
+                if is_process_alive(redis_process) is False:
+                    raise Exception("Redis process is not alive. Failing test.")
+            cluster_init_steps(clusterconfig, redis_conns, local_module_file)
+
+        if setup_type == "oss-standalone":
             r = redis.Redis(port=args.port)
             r.ping()
-            r.client_setname("redisbench-admin-
+            r.client_setname("redisbench-admin-standalone")
             redis_conns.append(r)
-    if setup_type == "oss-cluster":
-        for shardn, redis_process in enumerate(redis_processes):
-            logging.info(
-                "Checking if shard #{} process with pid={} is alive".format(
-                    shardn + 1, redis_process.pid
-                )
-            )
-            if is_process_alive(redis_process) is False:
-                raise Exception("Redis process is not alive. Failing test.")

-    if
-
+    if dataset is None:
+        if flushall_on_every_test_start:
+            logging.info("Will flush all data at test start...")
+            for shard_n, shard_conn in enumerate(redis_conns):
+                logging.info(f"Flushing all in shard {shard_n}...")
+                shard_conn.flushall()

     if check_dbconfig_tool_requirement(benchmark_config):
         logging.info("Detected the requirements to load data via client tool")

@@ -180,6 +209,8 @@ def local_db_spin(
         redis_conns,
     )

-    run_redis_pre_steps(
+    artifact_version = run_redis_pre_steps(
+        benchmark_config, redis_conns[0], required_modules
+    )

-    return cluster_api_enabled, redis_conns, redis_processes
+    return result, artifact_version, cluster_api_enabled, redis_conns, redis_processes
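
When no RDB dataset is required, the flushall_on_every_test_start path added above simply issues FLUSHALL on every shard connection before the test starts. A standalone sketch of that behavior against a locally running Redis (a single instance stands in for the shard list; requires the redis package and a server on the default port):

    import redis

    # one connection per shard; a single local instance is used here for illustration
    redis_conns = [redis.Redis(port=6379)]

    flushall_on_every_test_start = True
    if flushall_on_every_test_start:
        for shard_n, shard_conn in enumerate(redis_conns):
            print(f"Flushing all in shard {shard_n}...")
            shard_conn.flushall()
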
redisbench_admin/run_local/run_local.py CHANGED

@@ -11,6 +11,7 @@ import sys
 import datetime
 import traceback
 import redis
+from redisbench_admin.run.git import git_vars_crosscheck

 import redisbench_admin.run.metrics
 from redisbench_admin.profilers.perf import PERF_CALLGRAPH_MODE

@@ -30,7 +31,10 @@ from redisbench_admin.run.metrics import (
     from_info_to_overall_shard_cpu,
     collect_cpu_data,
 )
-from redisbench_admin.run.redistimeseries import
+from redisbench_admin.run.redistimeseries import (
+    datasink_profile_tabular_data,
+    timeseries_test_sucess_flow,
+)
 from redisbench_admin.run.run import (
     calculate_client_tool_duration_and_check,
     define_benchmark_plan,

@@ -49,6 +53,7 @@ from redisbench_admin.profilers.profilers_local import (
 from redisbench_admin.utils.benchmark_config import (
     prepare_benchmark_definitions,
     results_dict_kpi_check,
+    get_metadata_tags,
 )
 from redisbench_admin.utils.local import (
     get_local_run_full_filename,

@@ -67,20 +72,28 @@ def run_local_command_logic(args, project_name, project_version):
             project_name=project_name, project_version=project_version
         )
     )
+    tf_github_org = args.github_org
+    tf_github_actor = args.github_actor
+    tf_github_repo = args.github_repo
+    tf_github_sha = args.github_sha
+    tf_github_branch = args.github_branch
+
     (
+        github_actor,
+        github_branch,
         github_org_name,
         github_repo_name,
         github_sha,
-
-
-
-    ) = extract_git_vars()
+    ) = git_vars_crosscheck(
+        tf_github_actor, tf_github_branch, tf_github_org, tf_github_repo, tf_github_sha
+    )

     dbdir_folder = args.dbdir_folder
     os.path.abspath(".")
     required_modules = args.required_module
     profilers_enabled = args.enable_profilers
     s3_bucket_name = args.s3_bucket_name
+    flushall_on_every_test_start = args.flushall_on_every_test_start
     profilers_list = []
     if profilers_enabled:
         profilers_list = args.profilers.split(",")

@@ -125,7 +138,7 @@ def run_local_command_logic(args, project_name, project_version):
         _,
         benchmark_definitions,
         default_metrics,
-
+        exporter_timemetric_path,
         default_specs,
         clusterconfig,
     ) = prepare_benchmark_definitions(args)

@@ -189,6 +202,8 @@ def run_local_command_logic(args, project_name, project_version):
         if " " in binary:
             binary = binary.split(" ")
         (
+            result_db_spin,
+            artifact_version,
             cluster_api_enabled,
             redis_conns,
             redis_processes,

@@ -204,7 +219,13 @@ def run_local_command_logic(args, project_name, project_version):
             required_modules,
             setup_type,
             shard_count,
+            flushall_on_every_test_start,
         )
+        if result_db_spin is False:
+            logging.warning(
+                "Skipping this test given DB spin stage failed..."
+            )
+            continue
         if benchmark_type == "read-only":
             logging.info(
                 "Given the benchmark for this setup is ready-only we will prepare to reuse it on the next read-only benchmarks (if any )."

@@ -352,6 +373,7 @@ def run_local_command_logic(args, project_name, project_version):
             s3_bucket_name,
             test_name,
         )
+
         if (
             profilers_enabled
             and args.push_results_redistimeseries

@@ -377,7 +399,7 @@ def run_local_command_logic(args, project_name, project_version):
             start_time_str,
             stdout,
         )
-
+        results_dict = {}
         with open(
             local_benchmark_output_filename, "r"
         ) as json_file:

@@ -395,6 +417,31 @@ def run_local_command_logic(args, project_name, project_version):
         return_code = results_dict_kpi_check(
             benchmark_config, results_dict, return_code
         )
+
+        metadata_tags = get_metadata_tags(benchmark_config)
+        (
+            _,
+            branch_target_tables,
+        ) = timeseries_test_sucess_flow(
+            args.push_results_redistimeseries,
+            artifact_version,
+            benchmark_config,
+            benchmark_duration_seconds,
+            0,
+            default_metrics,
+            setup_name,
+            setup_type,
+            exporter_timemetric_path,
+            results_dict,
+            rts,
+            start_time_ms,
+            test_name,
+            github_branch,
+            github_org_name,
+            github_repo_name,
+            tf_triggering_env,
+        )
+
         if setup_details["env"] is None:
             if args.keep_env_and_topo is False:
                 for conn in redis_conns:
redisbench_admin/run_remote/args.py CHANGED

@@ -21,7 +21,6 @@ TF_OVERRIDE_NAME = os.getenv("TF_OVERRIDE_NAME", None)
 REMOTE_DB_PORT = int(os.getenv("REMOTE_DB_PORT", "6379"))
 REMOTE_DB_PASS = os.getenv("REMOTE_DB_PASS", None)
 REMOTE_PRIVATE_KEYNAME = os.getenv("REMOTE_PRIVATE_KEYNAME", DEFAULT_PRIVATE_KEY)
-REMOTE_SKIP_DB_SETUP = bool(int(os.getenv("REMOTE_SKIP_DB_SETUP", "0")))
 FLUSHALL_AT_START = bool(int(os.getenv("FLUSHALL_AT_START", "0")))
 FLUSHALL_AT_END = bool(int(os.getenv("FLUSHALL_AT_END", "0")))
 IGNORE_KEYSPACE_ERRORS = bool(int(os.getenv("IGNORE_KEYSPACE_ERRORS", "0")))

@@ -60,12 +59,6 @@ def create_run_remote_arguments(parser):
     )
     parser.add_argument("--db_port", type=int, default=REMOTE_DB_PORT)
     parser.add_argument("--db_pass", type=str, default=REMOTE_DB_PASS)
-    parser.add_argument(
-        "--skip-db-setup",
-        type=bool,
-        default=REMOTE_SKIP_DB_SETUP,
-        help="skip db setup/teardown steps. Usefull when you want to target an existing DB",
-    )
     parser.add_argument(
         "--flushall_on_every_test_start",
         type=bool,
redisbench_admin/utils/remote.py CHANGED

@@ -30,7 +30,7 @@ from redisbench_admin.utils.utils import (
 )

 # environment variables
-PERFORMANCE_RTS_PUSH = bool(os.getenv("PUSH_RTS",
+PERFORMANCE_RTS_PUSH = bool(int(os.getenv("PUSH_RTS", "0")))
 PERFORMANCE_RTS_AUTH = os.getenv("PERFORMANCE_RTS_AUTH", None)
 PERFORMANCE_RTS_USER = os.getenv("PERFORMANCE_RTS_USER", None)
 PERFORMANCE_RTS_HOST = os.getenv("PERFORMANCE_RTS_HOST", "localhost")

@@ -51,7 +51,7 @@ def get_git_root(path):

 def view_bar_simple(a, b):
     res = a / int(b) * 100
-    sys.stdout.write("\r Complete
+    sys.stdout.write("\r Complete percent: %.2f %%" % res)
     sys.stdout.flush()


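
The view_bar_simple() fix above also completes its format string; with %-style formatting a literal percent sign has to be written as %%. For example:

    res = 42.1234
    # prints " Complete percent: 42.12 %" (preceded by a carriage return)
    print("\r Complete percent: %.2f %%" % res)
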
{redisbench_admin-0.10.20.dist-info → redisbench_admin-0.10.22.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: redisbench-admin
-Version: 0.10.20
+Version: 0.10.22
 Summary: Redis benchmark run helper. A wrapper around Redis and Redis Modules benchmark tools ( ftsb_redisearch, memtier_benchmark, redis-benchmark, aibench, etc... ).
 Author: filipecosta90
 Author-email: filipecosta.90@gmail.com
{redisbench_admin-0.10.20.dist-info → redisbench_admin-0.10.22.dist-info}/RECORD CHANGED

@@ -4,7 +4,7 @@ redisbench_admin/commands/__init__.py,sha256=mzVrEtqefFdopyzR-W6xx3How95dyZfToGK
 redisbench_admin/commands/commands.json.py,sha256=mzVrEtqefFdopyzR-W6xx3How95dyZfToGKm1-_YzeY,95
 redisbench_admin/compare/__init__.py,sha256=DtBXRp0Q01XgCFmY-1OIePMyyYihVNAjZ1Y8zwqSDN0,101
 redisbench_admin/compare/args.py,sha256=10zbiT8roeTGnAubvoVFZTbJNbVxuRaRPtCKjxV8iNE,5226
-redisbench_admin/compare/compare.py,sha256
+redisbench_admin/compare/compare.py,sha256=-m6yor7d1z3Lv-kOf36G0qt0P9EyoiTqM-ZD0oEhLJ0,37747
 redisbench_admin/deploy/__init__.py,sha256=DtBXRp0Q01XgCFmY-1OIePMyyYihVNAjZ1Y8zwqSDN0,101
 redisbench_admin/deploy/args.py,sha256=neLUcQqI__HkJItkQg2C293hl5g3yHG40t171r7-E5Y,1732
 redisbench_admin/deploy/deploy.py,sha256=c1srxDMaUHuyh6wGdgLqzTz3ljZFtGqiumtAmguVyuk,3791

@@ -174,7 +174,7 @@ redisbench_admin/run/ann/pkg/templates/summary.html,sha256=Y-jePm8EGM41ensKay18R
 redisbench_admin/run/ann/pkg/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 redisbench_admin/run/ann/pkg/test/test-jaccard.py,sha256=oIhaQCQKrQokwv3fvgLSwPlRwkY0MNppG9Fc08oS3ZI,462
 redisbench_admin/run/ann/pkg/test/test-metrics.py,sha256=vJdS8Kuk8bAnpB65Uqb-9rUUI35XrHwaO3cNwKX5gxc,3057
-redisbench_admin/run/args.py,sha256=
+redisbench_admin/run/args.py,sha256=Sspv4eusQs9HMVMIyv_hB9vQ3S_s0lHYGAL1gPb2WGM,7864
 redisbench_admin/run/cluster.py,sha256=QuptSW-IhdyFIoQ3hzY613jtQWxBrVIlgc8OarcEK20,6072
 redisbench_admin/run/common.py,sha256=-TU0ncwCV-RpU7xHiz1W0UJfvxVjm9EJqHUmX5qFAzo,25194
 redisbench_admin/run/ftsb/__init__.py,sha256=DtBXRp0Q01XgCFmY-1OIePMyyYihVNAjZ1Y8zwqSDN0,101

@@ -205,13 +205,13 @@ redisbench_admin/run_async/log.py,sha256=cD7zfXt0VEmy0b7452HvcAxX_9kVj6Vm213yNdU
 redisbench_admin/run_async/render_files.py,sha256=OMPy3-GnU14tQ4HNlF5utOnmzpRAXURwG_h8UDkTmYs,2674
 redisbench_admin/run_async/run_async.py,sha256=g2ZOQqj9vXZYaRyNpJZtgfYyY9tMuRmEv3Hh3qWOUs8,14525
 redisbench_admin/run_local/__init__.py,sha256=DtBXRp0Q01XgCFmY-1OIePMyyYihVNAjZ1Y8zwqSDN0,101
-redisbench_admin/run_local/args.py,sha256=
+redisbench_admin/run_local/args.py,sha256=WrJtKkQeaYkdDScgLPMXIOo6QdTAWh6ziXe-EVPpris,702
 redisbench_admin/run_local/local_client.py,sha256=gwawMDOBrf7m--uyxu8kMZC5LBiLjbUBSKvzVOdOAas,124
-redisbench_admin/run_local/local_db.py,sha256=
+redisbench_admin/run_local/local_db.py,sha256=l2qzR38TmV-tCQwpDJchjFcSAr7gnpW-kkbOi7HRg6Y,7001
 redisbench_admin/run_local/local_helpers.py,sha256=JyqLW2-Sbm35BXjxxfOB1yK7ADdLfcVrq08NLNdIwac,7026
-redisbench_admin/run_local/run_local.py,sha256=
+redisbench_admin/run_local/run_local.py,sha256=Kzq0WjH7OmxD6RQt7RDynDNdhJ2fAa8i_mCtny9cmMQ,23455
 redisbench_admin/run_remote/__init__.py,sha256=DtBXRp0Q01XgCFmY-1OIePMyyYihVNAjZ1Y8zwqSDN0,101
-redisbench_admin/run_remote/args.py,sha256=
+redisbench_admin/run_remote/args.py,sha256=vhV87avBwXL8c2QLqrAkIyWD53MYhN06F-3wRv3l5xE,3829
 redisbench_admin/run_remote/consts.py,sha256=bCMkwyeBD-EmOpoHKni7LjWy5WuaxGJhGhqpi4AL0RQ,386
 redisbench_admin/run_remote/log.py,sha256=cD7zfXt0VEmy0b7452HvcAxX_9kVj6Vm213yNdUHP20,95
 redisbench_admin/run_remote/notifications.py,sha256=-W9fLaftEFNfplBl2clHk37jbYxliDbHftQ62khN31k,2157

@@ -228,15 +228,15 @@ redisbench_admin/utils/benchmark_config.py,sha256=bvhqKFENdDXpSZvF41EDUwMMhJJ4WQ
 redisbench_admin/utils/local.py,sha256=zUvyVI9LZMT3qyxs1pO3mXL6Bt_1z9EZUGppaRcWNRA,3890
 redisbench_admin/utils/redisearch.py,sha256=lchUEzpt0zB1rHwlDlw9LLifAnxFWcLP-PePw7TjL-0,1602
 redisbench_admin/utils/redisgraph_benchmark_go.py,sha256=os7EJt6kBxsFJLKkSoANbjMT7-cEq4-Ns-49alk2Tf8,2048
-redisbench_admin/utils/remote.py,sha256=
+redisbench_admin/utils/remote.py,sha256=sCGQxRdXsAlCBWDBDOGg0wg3buPfmsBpDVE7dibRxqg,38133
 redisbench_admin/utils/results.py,sha256=uKk3uNJ--bSXlUj_HGQ2OaV6MVqmXJVM8xTzFV6EOw4,3267
 redisbench_admin/utils/ssh.py,sha256=QW4AwlocMHJt05QMdN_4f8WeDmxiEwR80ny8VBThq6k,6533
 redisbench_admin/utils/utils.py,sha256=FLDjhGkW0PWwcu_nlTnIW6aZtHzJGz4LIwvu1CpCajw,14160
 redisbench_admin/watchdog/__init__.py,sha256=cD7zfXt0VEmy0b7452HvcAxX_9kVj6Vm213yNdUHP20,95
 redisbench_admin/watchdog/args.py,sha256=nKsG1G6ATOZlAMHMtT9u3kXxduKCbejSZ5x8oB_ynZ8,1312
 redisbench_admin/watchdog/watchdog.py,sha256=jFGtm5ktjKuXKWvH7lnmf3pp-ch1WBJUOomXILJMDAg,6158
-redisbench_admin-0.10.
-redisbench_admin-0.10.
-redisbench_admin-0.10.
-redisbench_admin-0.10.
-redisbench_admin-0.10.
+redisbench_admin-0.10.22.dist-info/LICENSE,sha256=AAMtfs82zOOvmG68vILivm6lxi2rcOlGObmA8jzxQvw,10768
+redisbench_admin-0.10.22.dist-info/entry_points.txt,sha256=UUawXk_AS-PlieKJ1QxPQXGsRLb6OW_F0MtmA1W0KE8,113
+redisbench_admin-0.10.22.dist-info/WHEEL,sha256=vVCvjcmxuUltf8cYhJ0sJMRDLr1XsPuxEId8YDzbyCY,88
+redisbench_admin-0.10.22.dist-info/METADATA,sha256=lEpx709afWMCeTYmbiwjafpBsifuAqqL5eZXVjmq3wI,5336
+redisbench_admin-0.10.22.dist-info/RECORD,,

{redisbench_admin-0.10.20.dist-info → redisbench_admin-0.10.22.dist-info}/LICENSE: File without changes

{redisbench_admin-0.10.20.dist-info → redisbench_admin-0.10.22.dist-info}/WHEEL: File without changes

{redisbench_admin-0.10.20.dist-info → redisbench_admin-0.10.22.dist-info}/entry_points.txt: File without changes