redis-benchmarks-specification 0.1.215__py3-none-any.whl → 0.1.217__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of redis-benchmarks-specification might be problematic; see the advisory details accompanying this diff.

@@ -234,6 +234,7 @@ def builder_process_stream(
234
234
  if b"git_hash" in testDetails:
235
235
  git_hash = testDetails[b"git_hash"]
236
236
  logging.info("Received commit hash specifier {}.".format(git_hash))
237
+ logging.info(f"Received the following build stream: {testDetails}.")
237
238
  binary_zip_key = testDetails[b"zip_archive_key"]
238
239
  logging.info(
239
240
  "Retriving zipped source from key {}.".format(
@@ -271,6 +272,16 @@ def builder_process_stream(
271
272
  if b"tests_groups_regexp" in testDetails:
272
273
  tests_groups_regexp = testDetails[b"tests_groups_regexp"].decode()
273
274
 
275
+ github_org = "redis"
276
+ if b"github_org" in testDetails:
277
+ github_org = testDetails[b"github_org"].decode()
278
+ logging.info(f"detected github_org info on build stream {github_org}")
279
+
280
+ github_repo = "redis"
281
+ if b"github_repo" in testDetails:
282
+ github_repo = testDetails[b"github_repo"].decode()
283
+ logging.info(f"detected github_repo info on build stream {github_repo}")
284
+
274
285
  # github updates
275
286
  is_actionable_pr = False
276
287
  contains_regression_comment = False
@@ -321,6 +332,14 @@ def builder_process_stream(
321
332
  build_artifacts = ["redis-server"]
322
333
  if "build_artifacts" in build_config:
323
334
  build_artifacts = build_config["build_artifacts"]
335
+ if b"build_artifacts" in testDetails:
336
+ new_build_artifacts = (
337
+ testDetails[b"build_artifacts"].decode().split(",")
338
+ )
339
+ logging.info(
340
+ f"overriding default build artifacts {build_artifacts} by {new_build_artifacts}"
341
+ )
342
+ build_artifacts = new_build_artifacts
324
343
  build_vars_str = ""
325
344
  if "env" in build_config:
326
345
  if build_config["env"] is not None:
@@ -343,6 +362,8 @@ def builder_process_stream(
343
362
  "linenoise",
344
363
  "lua",
345
364
  ]
365
+ if "fast_float" in deps_dir:
366
+ deps_list.append("fast_float")
346
367
  if "hdr_histogram" in deps_dir:
347
368
  deps_list.append("hdr_histogram")
348
369
  if "fpconv" in deps_dir:
@@ -359,6 +380,12 @@ def builder_process_stream(
359
380
  "redis-server",
360
381
  build_vars_str,
361
382
  )
383
+ if b"build_command" in testDetails:
384
+ build_command = testDetails[b"build_command"].decode()
385
+ server_name = "redis"
386
+ if b"server_name" in testDetails:
387
+ server_name = testDetails[b"server_name"].decode()
388
+
362
389
  build_start_datetime = datetime.datetime.utcnow()
363
390
  logging.info(
364
391
  "Using the following build command {}.".format(build_command)
@@ -433,6 +460,9 @@ def builder_process_stream(
433
460
  tests_priority_upper_limit,
434
461
  tests_regexp,
435
462
  use_git_timestamp,
463
+ server_name,
464
+ github_org,
465
+ github_repo,
436
466
  )
437
467
  if result is True:
438
468
  benchmark_stream_id = conn.xadd(
@@ -570,6 +600,9 @@ def generate_benchmark_stream_request(
570
600
  tests_priority_upper_limit=10000,
571
601
  tests_regexp=".*",
572
602
  use_git_timestamp=False,
603
+ server_name="redis",
604
+ github_org="redis",
605
+ github_repo="redis",
573
606
  ):
574
607
  build_stream_fields = {
575
608
  "id": id,
@@ -582,6 +615,9 @@ def generate_benchmark_stream_request(
582
615
  "tests_priority_upper_limit": tests_priority_upper_limit,
583
616
  "tests_priority_lower_limit": tests_priority_lower_limit,
584
617
  "tests_groups_regexp": tests_groups_regexp,
618
+ "server_name": server_name,
619
+ "github_org": github_org,
620
+ "github_repo": github_repo,
585
621
  }
586
622
  if build_config_metadata is not None:
587
623
  build_stream_fields["metadata"] = json.dumps(build_config_metadata)
@@ -592,6 +628,7 @@ def generate_benchmark_stream_request(
592
628
  if build_vars_str is not None:
593
629
  build_stream_fields["build_vars"] = build_vars_str
594
630
  if build_command is not None:
631
+ logging.info(f"adding build_command: {build_command}")
595
632
  build_stream_fields["build_command"] = build_command
596
633
  if build_image is not None:
597
634
  build_stream_fields["build_image"] = build_image
@@ -166,6 +166,21 @@ def spec_cli_args(parser):
166
166
  action="store_true",
167
167
  help="Iterate over the git commits.",
168
168
  )
169
+ parser.add_argument(
170
+ "--build_artifacts",
171
+ type=str,
172
+ default="",
173
+ )
174
+ parser.add_argument(
175
+ "--build_command",
176
+ type=str,
177
+ default="",
178
+ )
179
+ parser.add_argument(
180
+ "--git_hash",
181
+ type=str,
182
+ default="",
183
+ )
169
184
  parser.add_argument(
170
185
  "--dry-run",
171
186
  default=False,
@@ -87,6 +87,23 @@ def trigger_tests_dockerhub_cli_command_logic(args, project_name, project_versio
87
87
  args.build_arch,
88
88
  testDetails,
89
89
  "n/a",
90
+ [],
91
+ None,
92
+ None,
93
+ None,
94
+ None,
95
+ None,
96
+ None,
97
+ None,
98
+ None,
99
+ None,
100
+ None,
101
+ None,
102
+ None,
103
+ ".*",
104
+ 0,
105
+ 10000,
106
+ args.tests_regexp,
90
107
  )
91
108
  build_stream_fields["github_repo"] = args.gh_repo
92
109
  build_stream_fields["github_org"] = args.gh_org
@@ -338,6 +355,14 @@ def trigger_tests_cli_command_logic(args, project_name, project_version):
338
355
  filtered_hash_commits = []
339
356
  for cdict in commits:
340
357
  commit_hash = cdict["git_hash"]
358
+ if args.git_hash != "":
359
+ if args.git_hash != commit_hash:
360
+ logging.info(
361
+ "Skipping {} given it does not match commit hash {}".format(
362
+ commit_hash, args.git_hash
363
+ )
364
+ )
365
+ continue
341
366
  commit_summary = cdict["commit_summary"]
342
367
  commit_datetime = cdict["commit_datetime"]
343
368
  match_obj = re.search(hash_regexp_string, commit_hash)
@@ -395,6 +420,14 @@ def trigger_tests_cli_command_logic(args, project_name, project_version):
395
420
  commit_dict["tests_priority_lower_limit"] = tests_priority_lower_limit
396
421
  commit_dict["tests_regexp"] = tests_regexp
397
422
  commit_dict["tests_groups_regexp"] = tests_groups_regexp
423
+ commit_dict["github_org"] = args.gh_org
424
+ commit_dict["github_repo"] = args.gh_repo
425
+ if args.server_name is not None and args.server_name != "":
426
+ commit_dict["server_name"] = args.server_name
427
+ if args.build_artifacts != "":
428
+ commit_dict["build_artifacts"] = args.build_artifacts
429
+ if args.build_command != "":
430
+ commit_dict["build_command"] = args.build_command
398
431
  if pull_request is not None:
399
432
  logging.info(
400
433
  f"Have a pull request info to include in build request {pull_request}"
@@ -150,7 +150,7 @@ def exporter_datasink_common(
150
150
  git_hash=None,
151
151
  ):
152
152
  logging.info(
153
- f"Using datapoint_time_ms: {datapoint_time_ms}. git_has={git_hash}, git_branch={git_branch}, git_version={git_version}"
153
+ f"Using datapoint_time_ms: {datapoint_time_ms}. git_hash={git_hash}, git_branch={git_branch}, git_version={git_version}. gh_org={tf_github_org}, gh_repo={tf_github_repo}"
154
154
  )
155
155
  timeseries_test_sucess_flow(
156
156
  datasink_push_results_redistimeseries,
@@ -20,7 +20,7 @@ def generate_standalone_redis_server_args(
20
20
  "{}".format(port),
21
21
  ]
22
22
  if dbdir != "":
23
- command.extend(["--dbdir", dbdir])
23
+ command.extend(["--dir", dbdir])
24
24
  if configuration_parameters is not None:
25
25
  for parameter, parameter_value in configuration_parameters.items():
26
26
  if parameter not in added_params:
@@ -547,6 +547,11 @@ def process_self_contained_coordinator_stream(
547
547
  logging.info(
548
548
  f"detected a server_name definition on the streamdata: {server_name}."
549
549
  )
550
+ new_executable = f"{mnt_point}{server_name}-server"
551
+ logging.info(
552
+ "changing executable from {executable} to {new_executable}"
553
+ )
554
+ executable = new_executable
550
555
 
551
556
  if b"restore_build_artifacts" in testDetails:
552
557
  restore_build_artifacts = bool(
@@ -832,39 +837,16 @@ def process_self_contained_coordinator_stream(
832
837
  db_cpuset_cpus, current_cpu_pos = generate_cpuset_cpus(
833
838
  ceil_db_cpu_limit, current_cpu_pos
834
839
  )
835
- logging.info(
836
- "Running redis-server on docker image {} (cpuset={}) with the following args: {}".format(
837
- run_image, db_cpuset_cpus, command_str
838
- )
839
- )
840
- volumes = {}
841
- working_dir = "/"
842
- if mnt_point != "":
843
- volumes = {
844
- temporary_dir: {
845
- "bind": mnt_point,
846
- "mode": "rw",
847
- },
848
- }
849
- working_dir = mnt_point
850
- redis_container = docker_client.containers.run(
851
- image=run_image,
852
- volumes=volumes,
853
- auto_remove=True,
854
- privileged=True,
855
- working_dir=mnt_point,
856
- command=command_str,
857
- network_mode="host",
858
- detach=True,
859
- cpuset_cpus=db_cpuset_cpus,
860
- pid_mode="host",
861
- publish_all_ports=True,
840
+ redis_container = start_redis_container(
841
+ command_str,
842
+ db_cpuset_cpus,
843
+ docker_client,
844
+ mnt_point,
845
+ redis_containers,
846
+ run_image,
847
+ temporary_dir,
862
848
  )
863
849
 
864
- time.sleep(5)
865
-
866
- redis_containers.append(redis_container)
867
-
868
850
  r = redis.StrictRedis(port=redis_proc_start_port)
869
851
  r.ping()
870
852
  redis_conns = [r]
@@ -1251,9 +1233,10 @@ def process_self_contained_coordinator_stream(
1251
1233
  stdout=True, stderr=True
1252
1234
  )
1253
1235
  )
1236
+ redis_container.remove()
1254
1237
  except docker.errors.NotFound:
1255
1238
  logging.info(
1256
- "When trying to stop DB container with id {} and image {} it was already stopped".format(
1239
+ "When trying to fetch logs from DB container with id {} and image {} it was already stopped".format(
1257
1240
  redis_container.id,
1258
1241
  redis_container.image,
1259
1242
  )
@@ -1269,6 +1252,7 @@ def process_self_contained_coordinator_stream(
1269
1252
  for redis_container in redis_containers:
1270
1253
  try:
1271
1254
  redis_container.stop()
1255
+ redis_container.remove()
1272
1256
  except docker.errors.NotFound:
1273
1257
  logging.info(
1274
1258
  "When trying to stop DB container with id {} and image {} it was already stopped".format(
@@ -1282,6 +1266,7 @@ def process_self_contained_coordinator_stream(
1282
1266
  if type(redis_container) == Container:
1283
1267
  try:
1284
1268
  redis_container.stop()
1269
+ redis_container.remove()
1285
1270
  except docker.errors.NotFound:
1286
1271
  logging.info(
1287
1272
  "When trying to stop Client container with id {} and image {} it was already stopped".format(
@@ -1295,6 +1280,7 @@ def process_self_contained_coordinator_stream(
1295
1280
  temporary_dir, temporary_dir_client
1296
1281
  )
1297
1282
  )
1283
+
1298
1284
  shutil.rmtree(temporary_dir, ignore_errors=True)
1299
1285
  shutil.rmtree(temporary_dir_client, ignore_errors=True)
1300
1286
 
@@ -1475,6 +1461,50 @@ def process_self_contained_coordinator_stream(
1475
1461
  return stream_id, overall_result, total_test_suite_runs
1476
1462
 
1477
1463
 
1464
+ def start_redis_container(
1465
+ command_str,
1466
+ db_cpuset_cpus,
1467
+ docker_client,
1468
+ mnt_point,
1469
+ redis_containers,
1470
+ run_image,
1471
+ temporary_dir,
1472
+ auto_remove=False,
1473
+ ):
1474
+ logging.info(
1475
+ "Running redis-server on docker image {} (cpuset={}) with the following args: {}".format(
1476
+ run_image, db_cpuset_cpus, command_str
1477
+ )
1478
+ )
1479
+ volumes = {}
1480
+ working_dir = "/"
1481
+ if mnt_point != "":
1482
+ volumes = {
1483
+ temporary_dir: {
1484
+ "bind": mnt_point,
1485
+ "mode": "rw",
1486
+ },
1487
+ }
1488
+ logging.info(f"setting volume as follow: {volumes}. working_dir={mnt_point}")
1489
+ working_dir = mnt_point
1490
+ redis_container = docker_client.containers.run(
1491
+ image=run_image,
1492
+ volumes=volumes,
1493
+ auto_remove=auto_remove,
1494
+ privileged=True,
1495
+ working_dir=mnt_point,
1496
+ command=command_str,
1497
+ network_mode="host",
1498
+ detach=True,
1499
+ cpuset_cpus=db_cpuset_cpus,
1500
+ pid_mode="host",
1501
+ publish_all_ports=True,
1502
+ )
1503
+ time.sleep(5)
1504
+ redis_containers.append(redis_container)
1505
+ return redis_container
1506
+
1507
+
1478
1508
  def filter_test_files(
1479
1509
  defaults_filename,
1480
1510
  priority_lower_limit,
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: redis-benchmarks-specification
3
- Version: 0.1.215
3
+ Version: 0.1.217
4
4
  Summary: The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute.
5
5
  Author: filipecosta90
6
6
  Author-email: filipecosta.90@gmail.com
@@ -4,18 +4,18 @@ redis_benchmarks_specification/__api__/api.py,sha256=k_CMICtMm1z8jY3hByaL0hIr_5v
4
4
  redis_benchmarks_specification/__api__/app.py,sha256=JzQm84DjIVdfLbDO423BJbrds6gFzMbA0syRkHE_aUU,7063
5
5
  redis_benchmarks_specification/__builder__/Readme.md,sha256=O6MV_J3OSgzW-ir2TbukP8Vhkm_LOzQJJndG1Cykqic,111
6
6
  redis_benchmarks_specification/__builder__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
7
- redis_benchmarks_specification/__builder__/builder.py,sha256=7y_dey7EKKF50Dxk2WOlEnAPhO564TkgHr9uXSw8aeA,26081
7
+ redis_benchmarks_specification/__builder__/builder.py,sha256=T5dTB_qgX1FdPA_qQfFTHtnBL9O6w6TSI5YCKNJrxKM,27769
8
8
  redis_benchmarks_specification/__builder__/schema.py,sha256=1wcmyVJBcWrBvK58pghN9NCoWLCO3BzPsmdKWYfkVog,584
9
9
  redis_benchmarks_specification/__cli__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
10
- redis_benchmarks_specification/__cli__/args.py,sha256=ggjDVunIe8LbJ8QGg6Td2ZRgD8qUjX_xCOf4cP780k8,6728
11
- redis_benchmarks_specification/__cli__/cli.py,sha256=wMRlIa8t2hbQabtq3sYqBYGBGfxgvqhaHRsacHlRTqk,20105
10
+ redis_benchmarks_specification/__cli__/args.py,sha256=9uP9p2hqxr60k0XjpDl0TS22d3bHYdxsVZdgrgf5dg0,7013
11
+ redis_benchmarks_specification/__cli__/cli.py,sha256=vswKHtFCvWoxSW9bgehqjMTSfhtC8D5rwZ9w_OhtgU4,21116
12
12
  redis_benchmarks_specification/__cli__/stats.py,sha256=wahzZRbpfokv8dQU8O4BH5JFrOZk-l6k8LWdKfue9_0,20204
13
13
  redis_benchmarks_specification/__common__/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
14
14
  redis_benchmarks_specification/__common__/builder_schema.py,sha256=oU6zFrBsmPRgCr2c1zf7bM0o9bCyuqUpooJo0-nIHrE,5747
15
15
  redis_benchmarks_specification/__common__/env.py,sha256=kvJ8Ll-fvI_Tc0vynrzUEr22TqnJizzvJ4Lu9RjNr_M,3119
16
16
  redis_benchmarks_specification/__common__/github.py,sha256=QEHG05DgvJXkJPMMprLa7E1fwn3PrizfR-_N3_Ff93Q,10398
17
17
  redis_benchmarks_specification/__common__/package.py,sha256=4uVt1BAZ999LV2rZkq--Tk6otAVIf9YR3g3KGeUpiW4,834
18
- redis_benchmarks_specification/__common__/runner.py,sha256=24OIOLdnFYAzJ2mU_pbm4nc8wDJhaP-3tEIpTuDGpYs,6827
18
+ redis_benchmarks_specification/__common__/runner.py,sha256=7DBI09eu_4RibK6MwcYyIsOI0f-gmv7I-v9OOm2Y5QY,6878
19
19
  redis_benchmarks_specification/__common__/spec.py,sha256=eTF5559epBB0FrJPx-jRDQVeP_ZVOgyC7Vjxr2xk6fo,3262
20
20
  redis_benchmarks_specification/__common__/timeseries.py,sha256=_LJFtC5sVP7DTaLZaIzv5g7wRxPTQZRwFIYvWX4p4N8,50533
21
21
  redis_benchmarks_specification/__compare__/__init__.py,sha256=DtBXRp0Q01XgCFmY-1OIePMyyYihVNAjZ1Y8zwqSDN0,101
@@ -31,10 +31,10 @@ redis_benchmarks_specification/__self_contained_coordinator__/artifacts.py,sha25
31
31
  redis_benchmarks_specification/__self_contained_coordinator__/build_info.py,sha256=vlg8H8Rxu2falW8xp1GvL1SV1fyBguSbz6Apxc7A2yM,2282
32
32
  redis_benchmarks_specification/__self_contained_coordinator__/clients.py,sha256=voL6zP3RenpZ1A7JKGVkvEWVXI9KYwmnSgVJr6l8o-4,710
33
33
  redis_benchmarks_specification/__self_contained_coordinator__/cpuset.py,sha256=sRvtoJIitppcOpm3R5LbVmSfPEAqPumOqVATnF5Wbek,594
34
- redis_benchmarks_specification/__self_contained_coordinator__/docker.py,sha256=anBPdVDWYKGz2ladfdcP4dBDwcjCYhvOp0By2mbMTOI,2759
34
+ redis_benchmarks_specification/__self_contained_coordinator__/docker.py,sha256=Alf9Y1dfuOMoD4u_Dv3jTodkwZfSrlo2_YceevaWNFo,2757
35
35
  redis_benchmarks_specification/__self_contained_coordinator__/prepopulation.py,sha256=qB1rwqkROfuyFotB7MfUQiYS4Gzafd8dd2ca7lT4l2I,2909
36
36
  redis_benchmarks_specification/__self_contained_coordinator__/runners.py,sha256=FqVVvbXPsmq2I7pSH-JEklb1SDRdS7rG6ZHc6xTCUE0,28611
37
- redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=BWRc4tRAI769oNVcug_TCjNoRIfjhWM9AftpZym7aeE,74364
37
+ redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py,sha256=xyUex4kGWFP7vWtdrPQF92Pcs2ujCsTTPmtvTUMWFPg,74733
38
38
  redis_benchmarks_specification/__setups__/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
39
39
  redis_benchmarks_specification/__setups__/topologies.py,sha256=xQ1IJkcTji_ZjLiJd3vOxZpvbNtBLZw9cPkw5hGJKHU,481
40
40
  redis_benchmarks_specification/__spec__/__init__.py,sha256=l-G1z-t6twUgi8QLueqoTQLvJmv3hJoEYskGm6H7L6M,83
@@ -152,8 +152,8 @@ redis_benchmarks_specification/test-suites/memtier_benchmark-2keys-stream-5-entr
152
152
  redis_benchmarks_specification/test-suites/memtier_benchmark-2keys-stream-5-entries-xread-all-entries.yml,sha256=Z6T75dIbjRb4YO1tFIV9K4S_KFzRHfAa4q3kOg0vcHw,1112
153
153
  redis_benchmarks_specification/test-suites/memtier_benchmark-3Mkeys-load-string-with-512B-values.yml,sha256=XAIFlbR6VJnmQRwedLGBGenbIsMC_I3uA35Mz_bkTTc,1028
154
154
  redis_benchmarks_specification/test-suites/template.txt,sha256=d_edIE7Sxa5X7I2yG-Io0bPdbDIHR0oWFoCA3XUt_EU,435
155
- redis_benchmarks_specification-0.1.215.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
156
- redis_benchmarks_specification-0.1.215.dist-info/METADATA,sha256=1zud-90JXXG-pP3YUG1wfNkI-9aLAIbQz24Mex24ELg,22726
157
- redis_benchmarks_specification-0.1.215.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
158
- redis_benchmarks_specification-0.1.215.dist-info/entry_points.txt,sha256=x5WBXCZsnDRTZxV7SBGmC65L2k-ygdDOxV8vuKN00Nk,715
159
- redis_benchmarks_specification-0.1.215.dist-info/RECORD,,
155
+ redis_benchmarks_specification-0.1.217.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
156
+ redis_benchmarks_specification-0.1.217.dist-info/METADATA,sha256=Xl-aBoQ5ko_6URB4Keum16X-QCAd18fRcytsWRoaM9k,22726
157
+ redis_benchmarks_specification-0.1.217.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
158
+ redis_benchmarks_specification-0.1.217.dist-info/entry_points.txt,sha256=x5WBXCZsnDRTZxV7SBGmC65L2k-ygdDOxV8vuKN00Nk,715
159
+ redis_benchmarks_specification-0.1.217.dist-info/RECORD,,