redis-benchmarks-specification 0.2.42 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- redis_benchmarks_specification/__api__/Readme.md +7 -0
- redis_benchmarks_specification/__api__/__init__.py +5 -0
- redis_benchmarks_specification/__api__/api.py +87 -0
- redis_benchmarks_specification/__api__/app.py +191 -0
- redis_benchmarks_specification/__builder__/Readme.md +7 -0
- redis_benchmarks_specification/__builder__/__init__.py +5 -0
- redis_benchmarks_specification/__builder__/builder.py +1010 -0
- redis_benchmarks_specification/__builder__/schema.py +23 -0
- redis_benchmarks_specification/__cli__/__init__.py +5 -0
- redis_benchmarks_specification/__cli__/args.py +226 -0
- redis_benchmarks_specification/__cli__/cli.py +624 -0
- redis_benchmarks_specification/__cli__/stats.py +1304 -0
- redis_benchmarks_specification/__common__/__init__.py +0 -0
- redis_benchmarks_specification/__common__/builder_schema.py +256 -0
- redis_benchmarks_specification/__common__/env.py +96 -0
- redis_benchmarks_specification/__common__/github.py +280 -0
- redis_benchmarks_specification/__common__/package.py +28 -0
- redis_benchmarks_specification/__common__/runner.py +485 -0
- redis_benchmarks_specification/__common__/spec.py +143 -0
- redis_benchmarks_specification/__common__/suppress_warnings.py +20 -0
- redis_benchmarks_specification/__common__/timeseries.py +1621 -0
- redis_benchmarks_specification/__compare__/__init__.py +5 -0
- redis_benchmarks_specification/__compare__/args.py +240 -0
- redis_benchmarks_specification/__compare__/compare.py +3322 -0
- redis_benchmarks_specification/__init__.py +15 -0
- redis_benchmarks_specification/__runner__/__init__.py +5 -0
- redis_benchmarks_specification/__runner__/args.py +334 -0
- redis_benchmarks_specification/__runner__/remote_profiling.py +535 -0
- redis_benchmarks_specification/__runner__/runner.py +3837 -0
- redis_benchmarks_specification/__self_contained_coordinator__/__init__.py +5 -0
- redis_benchmarks_specification/__self_contained_coordinator__/args.py +210 -0
- redis_benchmarks_specification/__self_contained_coordinator__/artifacts.py +27 -0
- redis_benchmarks_specification/__self_contained_coordinator__/build_info.py +61 -0
- redis_benchmarks_specification/__self_contained_coordinator__/clients.py +58 -0
- redis_benchmarks_specification/__self_contained_coordinator__/cpuset.py +17 -0
- redis_benchmarks_specification/__self_contained_coordinator__/docker.py +108 -0
- redis_benchmarks_specification/__self_contained_coordinator__/post_processing.py +19 -0
- redis_benchmarks_specification/__self_contained_coordinator__/prepopulation.py +96 -0
- redis_benchmarks_specification/__self_contained_coordinator__/runners.py +740 -0
- redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py +2554 -0
- redis_benchmarks_specification/__setups__/__init__.py +0 -0
- redis_benchmarks_specification/__setups__/topologies.py +17 -0
- redis_benchmarks_specification/__spec__/__init__.py +5 -0
- redis_benchmarks_specification/__spec__/args.py +78 -0
- redis_benchmarks_specification/__spec__/cli.py +259 -0
- redis_benchmarks_specification/__watchdog__/__init__.py +5 -0
- redis_benchmarks_specification/__watchdog__/args.py +54 -0
- redis_benchmarks_specification/__watchdog__/watchdog.py +175 -0
- redis_benchmarks_specification/commands/__init__.py +0 -0
- redis_benchmarks_specification/commands/commands.py +15 -0
- redis_benchmarks_specification/setups/builders/gcc:15.2.0-amd64-debian-bookworm-default.yml +20 -0
- redis_benchmarks_specification/setups/builders/gcc:15.2.0-arm64-debian-bookworm-default.yml +20 -0
- redis_benchmarks_specification/setups/platforms/aws-ec2-1node-c5.4xlarge.yml +27 -0
- redis_benchmarks_specification/setups/topologies/topologies.yml +153 -0
- redis_benchmarks_specification/test-suites/defaults.yml +32 -0
- redis_benchmarks_specification/test-suites/generate.py +114 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-hash-hexpire-5-fields-10B-values.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-hash-hexpire-50-fields-10B-values.yml +53 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-hash-hexpireat-5-fields-10B-values.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-hash-hexpireat-50-fields-10B-values.yml +53 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-hash-hgetall-50-fields-100B-values.yml +52 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-hash-hgetex-5-fields-10B-values.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-hash-hgetex-50-fields-10B-values.yml +53 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-hash-hgetex-persist-50-fields-10B-values.yml +53 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-hash-hpexpire-5-fields-10B-values.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-hash-hpexpire-50-fields-10B-values.yml +53 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-hash-hpexpireat-5-fields-10B-values.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-hash-hpexpireat-50-fields-10B-values.yml +53 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-hash-htll-50-fields-10B-values.yml +53 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-load-hash-1-fields-with-1000B-values-expiration.yml +35 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-load-hash-1-fields-with-10B-values-expiration.yml +34 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-load-hash-1-fields-with-10B-values-long-expiration.yml +35 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-load-hash-1-fields-with-10B-values-short-expiration.yml +35 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-load-hash-20-fields-with-1B-values-pipeline-30.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-load-hash-5-fields-with-1000B-values-expiration.yml +36 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-load-hash-5-fields-with-10B-values-expiration.yml +35 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-load-hash-5-fields-with-10B-values-long-expiration.yml +36 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-load-hash-5-fields-with-10B-values-short-expiration.yml +36 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-load-hash-50-fields-with-1000B-values-expiration.yml +45 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-load-hash-50-fields-with-1000B-values.yml +44 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-load-hash-50-fields-with-100B-values.yml +44 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-load-hash-50-fields-with-10B-values-expiration.yml +44 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-load-hash-50-fields-with-10B-values-long-expiration.yml +45 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-load-hash-50-fields-with-10B-values-short-expiration.yml +45 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-100Kkeys-load-hash-50-fields-with-10B-values.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-10Kkeys-load-hash-50-fields-with-10000B-values.yml +44 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-10Kkeys-load-list-rpush-bulkload-pipeline-50.yml +39 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-10Kkeys-load-list-with-10B-values-pipeline-50.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-100B-values-pipeline-10.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-100B-values.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-10B-values-pipeline-10.yml +34 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-load-hash-5-fields-with-10B-values.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-10Mkeys-string-get-10B-pipeline-100-nokeyprefix.yml +38 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Kkeys-hash-listpack-500-fields-update-20-fields-with-1B-to-64B-values.yml +75 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-100B-expire-use-case.yml +50 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-10B-expire-use-case.yml +50 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-10B-psetex-expire-use-case.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-10B-setex-expire-use-case.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-1KiB-expire-use-case.yml +49 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-4KiB-expire-use-case.yml +50 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-bitmap-getbit-pipeline-10.yml +42 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-generic-exists-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-generic-expire-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-generic-expireat-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-generic-pexpire-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-generic-scan-count-500-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-generic-scan-cursor-count-500-pipeline-10.yml +42 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-generic-scan-cursor-count-5000-pipeline-10.yml +42 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-generic-scan-cursor-pipeline-10.yml +42 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-generic-scan-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-generic-scan-type-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-generic-touch-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-generic-ttl-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-hash-hexists.yml +45 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-hash-hget-hgetall-hkeys-hvals-with-100B-values.yml +48 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-hash-hgetall-50-fields-10B-values.yml +53 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-hash-hincrby.yml +42 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-hash-hincrbyfloat.yml +42 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-hash-hkeys-10-fields-with-10B-values-with-expiration-pipeline-10.yml +45 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-hash-hkeys-5-fields-with-100B-values-with-expiration-pipeline-10.yml +44 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-hash-hkeys-5-fields-with-10B-values-with-expiration-pipeline-10.yml +44 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-hash-hkeys-50-fields-with-10B-values-with-expiration-pipeline-10.yml +54 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-hash-hmget-5-fields-with-100B-values-pipeline-10.yml +44 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-hash-transactions-multi-exec-pipeline-20.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-list-lpop-rpop-with-100B-values.yml +44 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-list-lpop-rpop-with-10B-values.yml +44 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-list-lpop-rpop-with-1KiB-values.yml +44 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-list-rpoplpush-with-10B-values.yml +42 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-hash-5-fields-with-1000B-values-pipeline-10.yml +34 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-hash-5-fields-with-1000B-values.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-hash-50-fields-with-10B-values-long-expiration-pipeline-10.yml +46 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-hash-hmset-5-fields-with-1000B-values.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-list-rpush-with-10B-values.yml +32 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-list-with-100B-values.yml +32 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-list-with-10B-values-pipeline-10.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-list-with-10B-values.yml +32 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-list-with-1KiB-values.yml +32 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-set-intset-with-100-elements-19-digits-pipeline-10.yml +58 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-set-intset-with-100-elements-19-digits.yml +58 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-set-intset-with-100-elements-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-set-intset-with-100-elements.yml +40 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-stream-1-fields-with-100B-values-pipeline-10.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-stream-1-fields-with-100B-values.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-stream-5-fields-with-100B-values-pipeline-10.yml +34 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-stream-5-fields-with-100B-values.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-100B-values-pipeline-10.yml +32 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-100B-values.yml +35 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-10B-values-pipeline-10.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-10B-values-pipeline-100-nokeyprefix.yml +29 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-10B-values-pipeline-100.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-10B-values-pipeline-50.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-10B-values-pipeline-500.yml +33 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-10B-values.yml +32 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-1KiB-values-pipeline-10.yml +32 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-1KiB-values.yml +32 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-string-with-20KiB-values.yml +35 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-zset-listpack-with-100-elements-double-score.yml +91 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-zset-with-10-elements-double-score.yml +35 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-load-zset-with-10-elements-int-score.yml +34 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-append-1-100B-pipeline-10.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-append-1-100B.yml +42 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-decr.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-get-100B-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-get-100B.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-get-10B-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-get-10B-pipeline-100-nokeyprefix.yml +38 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-get-10B-pipeline-100.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-get-10B-pipeline-50.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-get-10B-pipeline-500.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-get-10B.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-get-1KiB-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-get-1KiB.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-get-32B-pipeline-10.yml +40 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-get-32B.yml +40 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-incr-pipeline-10.yml +30 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-incrby-pipeline-10.yml +30 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-incrby.yml +30 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-incrbyfloat-pipeline-10.yml +30 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-incrbyfloat.yml +30 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-int-encoding-strlen-pipeline-10.yml +40 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-mget-1KiB.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-mixed-50-50-set-get-100B-expire-pipeline-10.yml +45 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-mixed-50-50-set-get-100B-expire.yml +45 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-mixed-50-50-set-get-100B-pipeline-10.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-mixed-50-50-set-get-100B.yml +42 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-mixed-50-50-set-get-1KB-pipeline-10.yml +42 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-mixed-50-50-set-get-1KB.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-mixed-50-50-set-get-32B-pipeline-10.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-mixed-50-50-set-get-32B.yml +42 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-mixed-50-50-set-get-512B-pipeline-10.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-mixed-50-50-set-get-512B.yml +42 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-mixed-50-50-set-get-with-expiration-240B-400_conns.yml +47 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-set-with-ex-100B-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-setex-100B-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-setget200c-1KiB-pipeline-1.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-setget200c-1KiB-pipeline-10.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-setget200c-4KiB-pipeline-1.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-setget200c-4KiB-pipeline-10.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-setget200c-512B-pipeline-1.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-setget200c-512B-pipeline-10.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-setrange-100B-pipeline-10.yml +42 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1Mkeys-string-setrange-100B.yml +42 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-100M-bits-bitmap-bitcount.yml +45 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-1Billion-bits-bitmap-bitcount.yml +45 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-geo-2-elements-geopos.yml +38 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-geo-2-elements-geosearch-fromlonlat-withcoord.yml +39 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-geo-60M-elements-geodist-pipeline-10.yml +36 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-geo-60M-elements-geodist.yml +36 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-geo-60M-elements-geohash-pipeline-10.yml +35 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-geo-60M-elements-geohash.yml +34 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-geo-60M-elements-geopos-pipeline-10.yml +35 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-geo-60M-elements-geopos.yml +34 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-geo-60M-elements-geosearch-fromlonlat-bybox.yml +36 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-geo-60M-elements-geosearch-fromlonlat-pipeline-10.yml +36 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-geo-60M-elements-geosearch-fromlonlat.yml +36 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-hash-1K-fields-hgetall-pipeline-10.yml +285 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-hash-1K-fields-hgetall.yml +284 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-hash-hscan-1K-fields-100B-values-cursor-count-1000.yml +291 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-hash-hscan-1K-fields-10B-values-cursor-count-100.yml +291 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-hash-hscan-1K-fields-10B-values.yml +290 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-hash-hscan-50-fields-10B-values.yml +54 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-list-10-elements-lrange-all-elements-pipeline-10.yml +37 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-list-10-elements-lrange-all-elements.yml +36 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-list-100-elements-int-7bit-uint-lrange-all-elements-pipeline-10.yml +44 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-list-100-elements-int-lrange-all-elements-pipeline-10.yml +52 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-list-100-elements-llen-pipeline-10.yml +52 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-list-100-elements-lrange-all-elements-pipeline-10.yml +52 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-list-100-elements-lrange-all-elements.yml +51 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-list-10K-elements-lindex-integer.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-list-10K-elements-lindex-string-pipeline-10.yml +42 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-list-10K-elements-lindex-string.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-list-10K-elements-linsert-lrem-integer.yml +45 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-list-10K-elements-linsert-lrem-string.yml +45 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-list-10K-elements-lpos-integer.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-list-10K-elements-lpos-string.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-list-1K-elements-lrange-all-elements-pipeline-10.yml +202 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-list-1K-elements-lrange-all-elements.yml +201 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-list-2K-elements-quicklist-lrange-all-elements-longs.yml +258 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-load-hash-1K-fields-with-5B-values.yml +282 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-load-zset-with-5-elements-parsing-float-score.yml +36 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-load-zset-with-5-elements-parsing-hexa-score.yml +36 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-pfadd-4KB-values-pipeline-10.yml +32 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-set-10-elements-smembers-pipeline-10.yml +37 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-set-10-elements-smembers.yml +36 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-set-10-elements-smismember.yml +38 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-set-100-elements-sismember-is-a-member.yml +53 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-set-100-elements-sismember-not-a-member.yml +53 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-set-100-elements-smembers.yml +50 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-set-100-elements-smismember.yml +54 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-set-100-elements-sscan.yml +50 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-set-10M-elements-sismember-50pct-chance.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-set-10M-elements-srem-50pct-chance.yml +40 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-set-1K-elements-smembers.yml +200 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-set-1K-elements-sscan-cursor-count-100.yml +201 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-set-1K-elements-sscan.yml +200 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-set-1M-elements-sismember-50pct-chance.yml +40 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-set-200K-elements-sadd-constant.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-set-2M-elements-sadd-increasing.yml +32 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zincrby-1M-elements-pipeline-1.yml +40 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zrank-100K-elements-pipeline-1.yml +40 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zrank-10M-elements-pipeline-1.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zrank-1M-elements-pipeline-1.yml +40 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zrem-5M-elements-pipeline-1.yml +47 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zrevrangebyscore-256K-elements-pipeline-1.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zrevrangebyscore-256K-elements-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zrevrank-1M-elements-pipeline-1.yml +40 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zset-10-elements-zrange-all-elements-long-scores.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zset-10-elements-zrange-all-elements.yml +40 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zset-100-elements-zrange-all-elements.yml +66 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zset-100-elements-zrangebyscore-all-elements-long-scores.yml +66 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zset-100-elements-zrangebyscore-all-elements.yml +66 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zset-100-elements-zscan.yml +65 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zset-1K-elements-zrange-all-elements.yml +322 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zset-1K-elements-zscan.yml +321 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zset-1M-elements-zcard-pipeline-10.yml +39 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zset-1M-elements-zremrangebyscore-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zset-1M-elements-zrevrange-5-elements.yml +40 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zset-1M-elements-zrevrange-withscores-5-elements-pipeline-10.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zset-1M-elements-zscore-pipeline-10.yml +40 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zset-600K-elements-zrangestore-1K-elements.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zset-600K-elements-zrangestore-300K-elements.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-1key-zset-listpack-zrank-100-elements-pipeline-1.yml +50 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-2keys-lua-eval-hset-expire.yml +37 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-2keys-lua-evalsha-hset-expire.yml +41 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-2keys-set-10-100-elements-sdiff.yml +57 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-2keys-set-10-100-elements-sinter.yml +57 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-2keys-set-10-100-elements-sunion.yml +57 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-2keys-stream-5-entries-xread-all-entries-pipeline-10.yml +46 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-2keys-stream-5-entries-xread-all-entries.yml +46 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-2keys-zset-300-elements-skiplist-encoded-zunion.yml +434 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-2keys-zset-300-elements-skiplist-encoded-zunionstore.yml +434 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-3Mkeys-load-string-with-512B-values-pipeline-10.yml +37 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-3Mkeys-load-string-with-512B-values.yml +37 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-3Mkeys-string-get-with-1KiB-values-400_conns.yml +45 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-3Mkeys-string-get-with-1KiB-values-40_conns.yml +45 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-3Mkeys-string-get-with-1KiB-values-pipeline-10-2000_conns.yml +46 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-3Mkeys-string-get-with-1KiB-values-pipeline-10-400_conns.yml +46 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-3Mkeys-string-get-with-1KiB-values-pipeline-10-40_conns.yml +46 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-3Mkeys-string-mixed-20-80-with-512B-values-400_conns.yml +45 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-3Mkeys-string-mixed-20-80-with-512B-values-pipeline-10-2000_conns.yml +46 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-3Mkeys-string-mixed-20-80-with-512B-values-pipeline-10-400_conns.yml +46 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-3Mkeys-string-mixed-20-80-with-512B-values-pipeline-10-5200_conns.yml +46 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-3Mkeys-string-mixed-50-50-with-512B-values-with-expiration-pipeline-10-400_conns.yml +43 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-connection-hello-pipeline-10.yml +32 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-connection-hello.yml +32 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-multiple-hll-pfcount-100B-values.yml +34 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-multiple-hll-pfmerge-100B-values.yml +34 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-nokeys-connection-ping-pipeline-10.yml +29 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-nokeys-pubsub-mixed-100-channels-128B-100-publishers-100-subscribers.yml +40 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-nokeys-pubsub-mixed-100-channels-128B-100-publishers-1000-subscribers.yml +40 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-nokeys-pubsub-mixed-100-channels-128B-100-publishers-5000-subscribers.yml +40 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-nokeys-pubsub-mixed-100-channels-128B-100-publishers-50K-subscribers-5k-conns.yml +40 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-nokeys-pubsub-publish-1K-channels-10B-no-subscribers.yml +30 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-nokeys-server-time-pipeline-10.yml +29 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-leaderboard-top-10.yml +68 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-leaderboard-top-100.yml +69 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-leaderboard-top-1000.yml +68 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-rate-limiting-lua-100k-sessions.yml +64 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-realtime-analytics-membership-pipeline-10.yml +56 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-realtime-analytics-membership.yml +56 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-caching-hash-100k-sessions.yml +108 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-caching-json-100k-sessions.yml +109 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-caching-string-100k-sessions.yml +98 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-storage-100k-sessions.yml +205 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-playbook-session-storage-1k-sessions.yml +205 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-stream-10M-entries-xread-count-100.yml +36 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-stream-10M-entries-xreadgroup-count-100-noack.yml +38 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-stream-10M-entries-xreadgroup-count-100.yml +38 -0
- redis_benchmarks_specification/test-suites/memtier_benchmark-stream-concurrent-xadd-xreadgroup-70-30.yml +50 -0
- redis_benchmarks_specification/test-suites/template.txt +18 -0
- redis_benchmarks_specification/vector-search-test-suites/vector_db_benchmark_test.yml +41 -0
- redis_benchmarks_specification-0.2.42.dist-info/LICENSE +201 -0
- redis_benchmarks_specification-0.2.42.dist-info/METADATA +434 -0
- redis_benchmarks_specification-0.2.42.dist-info/RECORD +336 -0
- redis_benchmarks_specification-0.2.42.dist-info/WHEEL +4 -0
- redis_benchmarks_specification-0.2.42.dist-info/entry_points.txt +10 -0
--- /dev/null
+++ b/redis_benchmarks_specification/__builder__/builder.py
@@ -0,0 +1,1010 @@
+import argparse
+import datetime
+import io
+import json
+import logging
+import tempfile
+import shutil
+import docker
+import redis
+import os
+from zipfile import ZipFile, ZipInfo
+
+from redis_benchmarks_specification.__builder__.schema import (
+    get_build_config,
+    get_build_config_metadata,
+)
+from redis_benchmarks_specification.__common__.builder_schema import (
+    get_branch_version_from_test_details,
+)
+from redis_benchmarks_specification.__common__.env import (
+    STREAM_KEYNAME_GH_EVENTS_COMMIT,
+    GH_REDIS_SERVER_HOST,
+    GH_REDIS_SERVER_PORT,
+    GH_REDIS_SERVER_AUTH,
+    LOG_FORMAT,
+    LOG_DATEFMT,
+    LOG_LEVEL,
+    SPECS_PATH_SETUPS,
+    STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
+    STREAM_KEYNAME_NEW_BUILD_EVENTS,
+    get_arch_specific_stream_name,
+    REDIS_HEALTH_CHECK_INTERVAL,
+    REDIS_SOCKET_TIMEOUT,
+    REDIS_BINS_EXPIRE_SECS,
+)
+from redis_benchmarks_specification.__common__.github import (
+    check_github_available_and_actionable,
+    generate_build_finished_pr_comment,
+    update_comment_if_needed,
+    create_new_pr_comment,
+    generate_build_started_pr_comment,
+)
+from redis_benchmarks_specification.__common__.package import (
+    populate_with_poetry_data,
+    get_version_string,
+)
+
+PERFORMANCE_GH_TOKEN = os.getenv("PERFORMANCE_GH_TOKEN", None)
+
+
+def clear_pending_messages_for_builder_consumer(conn, builder_group, builder_id):
+    """Clear all pending messages for a specific builder consumer on startup"""
+    consumer_name = f"{builder_group}-proc#{builder_id}"
+
+    try:
+        # Get pending messages for this specific consumer
+        pending_info = conn.xpending_range(
+            STREAM_KEYNAME_GH_EVENTS_COMMIT,
+            builder_group,
+            min="-",
+            max="+",
+            count=1000,  # Get up to 1000 pending messages
+            consumername=consumer_name,
+        )
+
+        if pending_info:
+            message_ids = [msg["message_id"] for msg in pending_info]
+            logging.info(
+                f"Found {len(message_ids)} pending messages for builder consumer {consumer_name}. Clearing them..."
+            )
+
+            # Acknowledge all pending messages to clear them
+            ack_count = conn.xack(
+                STREAM_KEYNAME_GH_EVENTS_COMMIT, builder_group, *message_ids
+            )
+
+            logging.info(
+                f"Successfully cleared {ack_count} pending messages for builder consumer {consumer_name}"
+            )
+        else:
+            logging.info(
+                f"No pending messages found for builder consumer {consumer_name}"
+            )
+
+    except redis.exceptions.ResponseError as e:
+        if "NOGROUP" in str(e):
+            logging.info(f"Builder consumer group {builder_group} does not exist yet")
+        else:
+            logging.warning(f"Error clearing pending messages: {e}")
+    except Exception as e:
+        logging.error(f"Unexpected error clearing pending messages: {e}")
+
+
+def reset_builder_consumer_group_to_latest(conn, builder_group):
+    """Reset the builder consumer group position to only read new messages (skip old ones)"""
+    try:
+        # Set the consumer group position to '$' (latest) to skip all existing messages
+        conn.xgroup_setid(STREAM_KEYNAME_GH_EVENTS_COMMIT, builder_group, id="$")
+        logging.info(
+            f"Reset builder consumer group {builder_group} position to latest - will only process new messages"
+        )
+
+    except redis.exceptions.ResponseError as e:
+        if "NOGROUP" in str(e):
+            logging.info(f"Builder consumer group {builder_group} does not exist yet")
+        else:
+            logging.warning(f"Error resetting builder consumer group position: {e}")
+    except Exception as e:
+        logging.error(
+            f"Unexpected error resetting builder consumer group position: {e}"
+        )
+
+
+class ZipFileWithPermissions(ZipFile):
+    def _extract_member(self, member, targetpath, pwd):
+        if not isinstance(member, ZipInfo):
+            member = self.getinfo(member)
+
+        targetpath = super()._extract_member(member, targetpath, pwd)
+
+        attr = member.external_attr >> 16
+        if attr != 0:
+            os.chmod(targetpath, attr)
+        return targetpath
+
+
+def main():
+    _, _, project_version = populate_with_poetry_data()
+    project_name = "redis-benchmarks-spec builder"
+    parser = argparse.ArgumentParser(
+        description=get_version_string(project_name, project_version),
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+    )
+    parser.add_argument(
+        "--logname", type=str, default=None, help="logname to write the logs to"
+    )
+    parser.add_argument(
+        "--arch", type=str, default="amd64", help="arch to build artifacts"
+    )
+    parser.add_argument(
+        "--builder-group",
+        type=str,
+        default=STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
+        help="Consumer group name to read from the stream",
+    )
+    parser.add_argument(
+        "--builder-id",
+        type=str,
+        default="1",
+        help="Consumer id to read from the stream",
+    )
+    parser.add_argument(
+        "--setups-folder",
+        type=str,
+        default=SPECS_PATH_SETUPS,
+        help="Setups folder, containing the build environment variations sub-folder that we use to trigger different build artifacts",
+    )
+    parser.add_argument(
+        "--consumer-start-id",
+        type=str,
+        default=">",
+    )
+    parser.add_argument(
+        "--docker-air-gap",
+        default=False,
+        action="store_true",
+        help="Store the docker images in redis keys.",
+    )
+    parser.add_argument("--github_token", type=str, default=PERFORMANCE_GH_TOKEN)
+    parser.add_argument("--pull-request", type=str, default=None, nargs="?", const="")
+    parser.add_argument(
+        "--skip-clear-pending-on-startup",
+        default=False,
+        action="store_true",
+        help="Skip automatically clearing pending messages and resetting consumer group position on startup. By default, pending messages are cleared and consumer group is reset to latest position to skip old work and recover from crashes.",
+    )
+    args = parser.parse_args()
+    if args.logname is not None:
+        print("Writting log to {}".format(args.logname))
+        logging.basicConfig(
+            filename=args.logname,
+            filemode="a",
+            format=LOG_FORMAT,
+            datefmt=LOG_DATEFMT,
+            level=LOG_LEVEL,
+        )
+    else:
+        # logging settings
+        logging.basicConfig(
+            format=LOG_FORMAT,
+            level=LOG_LEVEL,
+            datefmt=LOG_DATEFMT,
+        )
+    logging.info(get_version_string(project_name, project_version))
+    builders_folder = os.path.abspath(args.setups_folder + "/builders")
+    logging.info("Using package dir {} for inner file paths".format(builders_folder))
+    different_build_specs = os.listdir(builders_folder)
+    logging.info(
+        "Using the following build specs folder {}, containing {} different specs.".format(
+            builders_folder, len(different_build_specs)
+        )
+    )
+
+    logging.info(
+        "Using redis available at: {}:{} to read the event streams".format(
+            GH_REDIS_SERVER_HOST, GH_REDIS_SERVER_PORT
+        )
+    )
+    try:
+        conn = redis.StrictRedis(
+            host=GH_REDIS_SERVER_HOST,
+            port=GH_REDIS_SERVER_PORT,
+            decode_responses=False,  # dont decode due to zip archive
+            password=GH_REDIS_SERVER_AUTH,
+            health_check_interval=REDIS_HEALTH_CHECK_INTERVAL,
+            socket_connect_timeout=REDIS_SOCKET_TIMEOUT,
+            socket_keepalive=True,
+        )
+        conn.ping()
+    except redis.exceptions.ConnectionError as e:
+        logging.error(
+            "Unable to connect to redis available at: {}:{} to read the event streams".format(
+                GH_REDIS_SERVER_HOST, GH_REDIS_SERVER_PORT
+            )
+        )
+        logging.error("Error message {}".format(e.__str__()))
+        exit(1)
+
+    arch = args.arch
+    logging.info("Building for arch: {}".format(arch))
+
+    build_spec_image_prefetch(builders_folder, different_build_specs)
+
+    builder_group = args.builder_group
+    builder_id = args.builder_id
+    if builder_group is None:
+        builder_group = STREAM_GH_EVENTS_COMMIT_BUILDERS_CG
+    if builder_id is None:
+        builder_id = "1"
+
+    builder_consumer_group_create(conn, builder_group)
+
+    # Clear pending messages and reset consumer group position by default (unless explicitly skipped)
+    if not args.skip_clear_pending_on_startup:
+        logging.info(
+            "Clearing pending messages and resetting builder consumer group position on startup (default behavior)"
+        )
+        clear_pending_messages_for_builder_consumer(conn, builder_group, builder_id)
+        reset_builder_consumer_group_to_latest(conn, builder_group)
+    else:
+        logging.info(
+            "Skipping pending message cleanup and builder consumer group reset as requested"
+        )
+
+    if args.github_token is not None:
+        logging.info("detected a github token. will update as much as possible!!! =)")
+    previous_id = args.consumer_start_id
+    while True:
+        previous_id, new_builds_count, _ = builder_process_stream(
+            builders_folder,
+            conn,
+            different_build_specs,
+            previous_id,
+            args.docker_air_gap,
+            arch,
+            args.github_token,
+            builder_group,
+            builder_id,
+        )
+
+
+def builder_consumer_group_create(
+    conn, builder_group=STREAM_GH_EVENTS_COMMIT_BUILDERS_CG, id="$"
+):
+    try:
+        conn.xgroup_create(
+            STREAM_KEYNAME_GH_EVENTS_COMMIT,
+            builder_group,
+            mkstream=True,
+            id=id,
+        )
+        logging.info(
+            "Created consumer group named {} to distribute work.".format(builder_group)
+        )
+    except redis.exceptions.ResponseError:
+        logging.info("Consumer group named {} already existed.".format(builder_group))
+
+
+def check_benchmark_build_comment(comments):
+    res = False
+    pos = -1
+    for n, comment in enumerate(comments):
+        body = comment.body
+        if "CE Performance Automation : step 1 of 2" in body:
+            res = True
+            pos = n
+    return res, pos
+
+
+def builder_process_stream(
+    builders_folder,
+    conn,
+    different_build_specs,
+    previous_id,
+    docker_air_gap=False,
+    arch="amd64",
+    github_token=None,
+    builder_group=None,
+    builder_id=None,
+):
+    new_builds_count = 0
+    auto_approve_github_comments = True
+    build_stream_fields_arr = []
+    if builder_group is None:
+        builder_group = STREAM_GH_EVENTS_COMMIT_BUILDERS_CG
+    if builder_id is None:
+        builder_id = "1"
+    consumer_name = "{}-proc#{}".format(builder_group, builder_id)
+    logging.info(
+        f"Entering blocking read waiting for work. building for arch: {arch}. Using consumer id {consumer_name}"
+    )
+    newTestInfo = conn.xreadgroup(
+        builder_group,
+        consumer_name,
+        {STREAM_KEYNAME_GH_EVENTS_COMMIT: previous_id},
+        count=1,
+        block=0,
+    )
+
+    if len(newTestInfo[0]) < 2 or len(newTestInfo[0][1]) < 1:
+        previous_id = ">"
+    else:
+        streamId, testDetails = newTestInfo[0][1][0]
+        logging.info("Received work . Stream id {}.".format(streamId))
+        # commit = None
+        # commited_date = ""
+        # tag = ""
+        docker_client = docker.from_env()
+        from pathlib import Path
+
+        build_request_arch = None
+        if b"arch" in testDetails:
+            build_request_arch = testDetails[b"arch"].decode()
+        elif b"build_arch" in testDetails:
+            build_request_arch = testDetails[b"build_arch"].decode()
+        else:
+            logging.info("No arch info found on the stream.")
+        if build_request_arch is not None and build_request_arch != arch:
+            logging.info(
+                "skipping build request given requested build arch {}!={}".format(
+                    build_request_arch, arch
+                )
+            )
+            # Acknowledge the message even though we're skipping it
+            ack_reply = conn.xack(
+                STREAM_KEYNAME_GH_EVENTS_COMMIT,
+                STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
+                streamId,
+            )
+            if type(ack_reply) == bytes:
+                ack_reply = ack_reply.decode()
+            if ack_reply == "1" or ack_reply == 1:
+                logging.info(
+                    "Successfully acknowledged build variation stream with id {} (filtered by arch).".format(
+                        streamId
+                    )
+                )
+            else:
+                logging.error(
+                    "Unable to acknowledge build variation stream with id {}. XACK reply {}".format(
+                        streamId, ack_reply
+                    )
+                )
+            return previous_id, new_builds_count, build_stream_fields_arr
+        else:
+            logging.info(
+                "No arch info found on the stream. Using default arch {}.".format(arch)
+            )
+            build_request_arch = arch
+
+        home = str(Path.home())
+        if b"git_hash" in testDetails:
+            git_hash = testDetails[b"git_hash"]
+            logging.info("Received commit hash specifier {}.".format(git_hash))
+            logging.info(f"Received the following build stream: {testDetails}.")
+            binary_zip_key = testDetails[b"zip_archive_key"]
+            logging.info(
+                "Retriving zipped source from key {}.".format(
+                    testDetails[b"zip_archive_key"]
+                )
+            )
+            buffer = conn.get(binary_zip_key)
+            git_timestamp_ms = None
+            use_git_timestamp = False
+            commit_datetime = "n/a"
+            if b"commit_datetime" in testDetails:
+                commit_datetime = testDetails[b"commit_datetime"].decode()
+            commit_summary = "n/a"
+            if b"commit_summary" in testDetails:
+                commit_summary = testDetails[b"commit_summary"].decode()
+            git_branch, git_version = get_branch_version_from_test_details(testDetails)
+            if b"use_git_timestamp" in testDetails:
+                use_git_timestamp = bool(testDetails[b"use_git_timestamp"])
+            if b"git_timestamp_ms" in testDetails:
+                git_timestamp_ms = int(testDetails[b"git_timestamp_ms"].decode())
+            tests_regexp = ".*"
+            if b"tests_regexp" in testDetails:
+                tests_regexp = testDetails[b"tests_regexp"].decode()
+            tests_priority_upper_limit = 10000
+            if b"tests_priority_upper_limit" in testDetails:
+                tests_priority_upper_limit = int(
+                    testDetails[b"tests_priority_upper_limit"].decode()
+                )
+            tests_priority_lower_limit = 0
+            if b"tests_priority_lower_limit" in testDetails:
+                tests_priority_lower_limit = int(
+                    testDetails[b"tests_priority_lower_limit"].decode()
+                )
+            tests_groups_regexp = ".*"
+            if b"tests_groups_regexp" in testDetails:
+                tests_groups_regexp = testDetails[b"tests_groups_regexp"].decode()
+
+            github_org = "redis"
+            if b"github_org" in testDetails:
+                github_org = testDetails[b"github_org"].decode()
+                logging.info(f"detected github_org info on build stream {github_org}")
+
+            github_repo = "redis"
+            if b"github_repo" in testDetails:
+                github_repo = testDetails[b"github_repo"].decode()
+                logging.info(f"detected github_repo info on build stream {github_repo}")
+
+            # github updates
+            is_actionable_pr = False
+            contains_regression_comment = False
+            github_pr = None
+            old_regression_comment_body = ""
+            pr_link = ""
+            regression_comment = ""
+            pull_request = None
+            if b"pull_request" in testDetails:
+                pull_request = testDetails[b"pull_request"].decode()
+                logging.info(f"Detected PR info in builder. PR: {pull_request}")
+            verbose = True
+
+            fn = check_benchmark_build_comment
+            (
+                contains_regression_comment,
+                github_pr,
+                is_actionable_pr,
+                old_regression_comment_body,
+                pr_link,
+                regression_comment,
+            ) = check_github_available_and_actionable(
+                fn, github_token, pull_request, "redis", "redis", verbose
+            )
+            for build_spec in different_build_specs:
+                build_config, id = get_build_config(builders_folder + "/" + build_spec)
+                build_config_metadata = get_build_config_metadata(build_config)
+
+                build_image = build_config["build_image"]
+                build_arch = build_config["arch"]
+                if build_arch != arch:
+                    logging.info(
+                        "skipping build spec {} given arch {}!={}".format(
+                            build_spec, build_arch, arch
+                        )
+                    )
+                    continue
+                run_image = build_image
+                if "run_image" in build_config:
+                    run_image = build_config["run_image"]
+                if docker_air_gap:
+                    store_airgap_image_redis(conn, docker_client, run_image)
+
+                compiler = build_config["compiler"]
+                cpp_compiler = build_config["cpp_compiler"]
+                build_os = build_config["os"]
+
+                build_artifacts = ["redis-server"]
+                if "build_artifacts" in build_config:
+                    build_artifacts = build_config["build_artifacts"]
+                if b"build_artifacts" in testDetails:
+                    new_build_artifacts = (
+                        testDetails[b"build_artifacts"].decode().split(",")
+                    )
+                    logging.info(
+                        f"overriding default build artifacts {build_artifacts} by {new_build_artifacts}"
+                    )
+                    build_artifacts = new_build_artifacts
+                build_vars_str = ""
+                if "env" in build_config:
+                    if build_config["env"] is not None:
+                        for k, v in build_config["env"].items():
+                            build_vars_str += '{}="{}" '.format(k, v)
+
+                temporary_dir = tempfile.mkdtemp(dir=home)
+                logging.info(
+                    "Using local temporary dir to persist redis build artifacts. Path: {}".format(
+                        temporary_dir
+                    )
+                )
+                z = ZipFileWithPermissions(io.BytesIO(buffer))
+                z.extractall(temporary_dir)
+                redis_dir = os.listdir(temporary_dir + "/")[0]
+                deps_dir = os.listdir(temporary_dir + "/" + redis_dir + "/deps")
+                deps_list = [
+                    "hiredis",
+                    "jemalloc",
+                    "linenoise",
+                    "lua",
+                ]
+                if "fast_float" in deps_dir:
+                    deps_list.append("fast_float")
+                if "hdr_histogram" in deps_dir:
+                    deps_list.append("hdr_histogram")
+                if "fpconv" in deps_dir:
+                    deps_list.append("fpconv")
+                redis_temporary_dir = temporary_dir + "/" + redis_dir + "/"
+                logging.info("Using redis temporary dir {}".format(redis_temporary_dir))
+                # build_command = "bash -c 'make Makefile.dep && cd ./deps && CXX={} CC={} make {} {} -j && cd .. && CXX={} CC={} make {} {} -j'".format(
+                #     cpp_compiler,
+                #     compiler,
+                #     " ".join(deps_list),
+                #     build_vars_str,
+                #     cpp_compiler,
+                #     compiler,
+                #     "redis-server",
+                #     build_vars_str,
+                # )
+                build_command = "sh -c 'make -j'"
+                if "build_command" in build_config:
+                    build_command = build_config["build_command"]
+                if b"build_command" in testDetails:
+                    build_command = testDetails[b"build_command"].decode()
+                server_name = "redis"
+                if b"server_name" in testDetails:
+                    server_name = testDetails[b"server_name"].decode()
+
+                # Check if artifacts already exist before building
+                prefix = f"build_spec={build_spec}/github_org={github_org}/github_repo={github_repo}/git_branch={str(git_branch)}/git_version={str(git_version)}/git_hash={str(git_hash)}"
+
+                # Create a comprehensive build signature that includes all build-affecting parameters
+                import hashlib
+
+                build_signature_parts = [
+                    str(id),  # build config ID
+                    str(build_command),  # build command
+                    str(build_vars_str),  # environment variables
+                    str(compiler),  # compiler
+                    str(cpp_compiler),  # C++ compiler
+                    str(build_image),  # build image
+                    str(build_os),  # OS
+                    str(build_arch),  # architecture
+                    ",".join(sorted(build_artifacts)),  # artifacts list
+                ]
+                build_signature = hashlib.sha256(
+                    ":".join(build_signature_parts).encode()
+                ).hexdigest()[:16]
+
+                # Check if all artifacts already exist
+                all_artifacts_exist = True
+                artifact_keys = {}
+                for artifact in build_artifacts:
+                    bin_key = f"zipped:artifacts:{prefix}:{id}:{build_signature}:{artifact}.zip"
+                    artifact_keys[artifact] = bin_key
+                    if not conn.exists(bin_key):
+                        all_artifacts_exist = False
+                        break
+
+                if all_artifacts_exist:
+                    logging.info(
+                        f"Artifacts for {git_hash}:{id} with build signature {build_signature} already exist, reusing them"
+                    )
+                    # Skip build and reuse existing artifacts
+                    build_stream_fields, result = generate_benchmark_stream_request(
+                        id,
+                        conn,
+                        run_image,
|
+
build_arch,
|
|
581
|
+
testDetails,
|
|
582
|
+
build_os,
|
|
583
|
+
build_artifacts,
|
|
584
|
+
build_command,
|
|
585
|
+
build_config_metadata,
|
|
586
|
+
build_image,
|
|
587
|
+
build_vars_str,
|
|
588
|
+
compiler,
|
|
589
|
+
cpp_compiler,
|
|
590
|
+
git_branch,
|
|
591
|
+
git_hash,
|
|
592
|
+
git_timestamp_ms,
|
|
593
|
+
git_version,
|
|
594
|
+
pull_request,
|
|
595
|
+
None, # redis_temporary_dir not needed for reuse
|
|
596
|
+
tests_groups_regexp,
|
|
597
|
+
tests_priority_lower_limit,
|
|
598
|
+
tests_priority_upper_limit,
|
|
599
|
+
tests_regexp,
|
|
600
|
+
".*", # command_regexp - default to all commands
|
|
601
|
+
use_git_timestamp,
|
|
602
|
+
server_name,
|
|
603
|
+
github_org,
|
|
604
|
+
github_repo,
|
|
605
|
+
artifact_keys, # Pass existing artifact keys
|
|
606
|
+
)
|
|
607
|
+
# Add to benchmark stream even when reusing artifacts
|
|
608
|
+
if result is True:
|
|
609
|
+
arch_specific_stream = get_arch_specific_stream_name(build_arch)
|
|
610
|
+
logging.info(
|
|
611
|
+
f"Adding reused build work to architecture-specific stream: {arch_specific_stream}"
|
|
612
|
+
)
|
|
613
|
+
benchmark_stream_id = conn.xadd(
|
|
614
|
+
arch_specific_stream, build_stream_fields
|
|
615
|
+
)
|
|
616
|
+
logging.info(
|
|
617
|
+
"successfully reused build variant {} for redis git_sha {}. Stream id: {}".format(
|
|
618
|
+
id, git_hash, benchmark_stream_id
|
|
619
|
+
)
|
|
620
|
+
)
|
|
621
|
+
streamId_decoded = streamId.decode()
|
|
622
|
+
benchmark_stream_id_decoded = benchmark_stream_id.decode()
|
|
623
|
+
builder_list_completed = (
|
|
624
|
+
f"builder:{streamId_decoded}:builds_completed"
|
|
625
|
+
)
|
|
626
|
+
conn.lpush(builder_list_completed, benchmark_stream_id_decoded)
|
|
627
|
+
conn.expire(builder_list_completed, REDIS_BINS_EXPIRE_SECS)
|
|
628
|
+
logging.info(
|
|
629
|
+
f"Adding information of build->benchmark stream info in list {builder_list_completed}. Adding benchmark stream id: {benchmark_stream_id_decoded}"
|
|
630
|
+
)
|
|
631
|
+
build_stream_fields_arr.append(build_stream_fields)
|
|
632
|
+
new_builds_count = new_builds_count + 1
|
|
633
|
+
continue # Skip to next build spec
|
|
634
|
+
|
|
635
|
+
logging.info(
|
|
636
|
+
f"Building artifacts for {git_hash}:{id} with build signature {build_signature}"
|
|
637
|
+
)
|
|
638
|
+
|
|
639
|
+
build_start_datetime = datetime.datetime.utcnow()
|
|
640
|
+
logging.info(
|
|
641
|
+
"Using the following build command {}.".format(build_command)
|
|
642
|
+
)
|
|
643
|
+
if is_actionable_pr:
|
|
644
|
+
logging.info(
|
|
645
|
+
f"updating on github we'll start the build at {build_start_datetime}"
|
|
646
|
+
)
|
|
647
|
+
comment_body = generate_build_started_pr_comment(
|
|
648
|
+
build_start_datetime,
|
|
649
|
+
commit_datetime,
|
|
650
|
+
commit_summary,
|
|
651
|
+
git_branch,
|
|
652
|
+
git_hash,
|
|
653
|
+
tests_groups_regexp,
|
|
654
|
+
tests_priority_lower_limit,
|
|
655
|
+
tests_priority_upper_limit,
|
|
656
|
+
tests_regexp,
|
|
657
|
+
)
|
|
658
|
+
if contains_regression_comment:
|
|
659
|
+
update_comment_if_needed(
|
|
660
|
+
auto_approve_github_comments,
|
|
661
|
+
comment_body,
|
|
662
|
+
old_regression_comment_body,
|
|
663
|
+
regression_comment,
|
|
664
|
+
verbose,
|
|
665
|
+
)
|
|
666
|
+
else:
|
|
667
|
+
regression_comment = create_new_pr_comment(
|
|
668
|
+
auto_approve_github_comments,
|
|
669
|
+
comment_body,
|
|
670
|
+
github_pr,
|
|
671
|
+
pr_link,
|
|
672
|
+
)
|
|
673
|
+
|
|
674
|
+
docker_client.containers.run(
|
|
675
|
+
image=build_image,
|
|
676
|
+
volumes={
|
|
677
|
+
redis_temporary_dir: {"bind": "/mnt/redis/", "mode": "rw"},
|
|
678
|
+
},
|
|
679
|
+
auto_remove=True,
|
|
680
|
+
privileged=True,
|
|
681
|
+
working_dir="/mnt/redis/",
|
|
682
|
+
command=build_command,
|
|
683
|
+
)
|
|
684
|
+
build_end_datetime = datetime.datetime.utcnow()
|
|
685
|
+
build_duration = build_end_datetime - build_start_datetime
|
|
686
|
+
build_duration_secs = build_duration.total_seconds()
|
|
687
|
+
|
|
688
|
+
build_stream_fields, result = generate_benchmark_stream_request(
|
|
689
|
+
id,
|
|
690
|
+
conn,
|
|
691
|
+
run_image,
|
|
692
|
+
build_arch,
|
|
693
|
+
testDetails,
|
|
694
|
+
build_os,
|
|
695
|
+
build_artifacts,
|
|
696
|
+
build_command,
|
|
697
|
+
build_config_metadata,
|
|
698
|
+
build_image,
|
|
699
|
+
build_vars_str,
|
|
700
|
+
compiler,
|
|
701
|
+
cpp_compiler,
|
|
702
|
+
git_branch,
|
|
703
|
+
git_hash,
|
|
704
|
+
git_timestamp_ms,
|
|
705
|
+
git_version,
|
|
706
|
+
pull_request,
|
|
707
|
+
redis_temporary_dir,
|
|
708
|
+
tests_groups_regexp,
|
|
709
|
+
tests_priority_lower_limit,
|
|
710
|
+
tests_priority_upper_limit,
|
|
711
|
+
tests_regexp,
|
|
712
|
+
".*", # command_regexp - default to all commands
|
|
713
|
+
use_git_timestamp,
|
|
714
|
+
server_name,
|
|
715
|
+
github_org,
|
|
716
|
+
github_repo,
|
|
717
|
+
None, # existing_artifact_keys - None for new builds
|
|
718
|
+
)
|
|
719
|
+
if result is True:
|
|
720
|
+
arch_specific_stream = get_arch_specific_stream_name(build_arch)
|
|
721
|
+
logging.info(
|
|
722
|
+
f"Adding new build work to architecture-specific stream: {arch_specific_stream}"
|
|
723
|
+
)
|
|
724
|
+
benchmark_stream_id = conn.xadd(
|
|
725
|
+
arch_specific_stream, build_stream_fields
|
|
726
|
+
)
|
|
727
|
+
logging.info(
|
|
728
|
+
"sucessfully built build variant {} for redis git_sha {}. Stream id: {}".format(
|
|
729
|
+
id, git_hash, benchmark_stream_id
|
|
730
|
+
)
|
|
731
|
+
)
|
|
732
|
+
streamId_decoded = streamId.decode()
|
|
733
|
+
benchmark_stream_id_decoded = benchmark_stream_id.decode()
|
|
734
|
+
builder_list_completed = (
|
|
735
|
+
f"builder:{streamId_decoded}:builds_completed"
|
|
736
|
+
)
|
|
737
|
+
conn.lpush(builder_list_completed, benchmark_stream_id_decoded)
|
|
738
|
+
conn.expire(builder_list_completed, REDIS_BINS_EXPIRE_SECS)
|
|
739
|
+
logging.info(
|
|
740
|
+
f"Adding information of build->benchmark stream info in list {builder_list_completed}. Adding benchmark stream id: {benchmark_stream_id_decoded}"
|
|
741
|
+
)
|
|
742
|
+
benchmark_stream_ids = [benchmark_stream_id_decoded]
|
|
743
|
+
|
|
744
|
+
if is_actionable_pr:
|
|
745
|
+
logging.info(
|
|
746
|
+
f"updating on github that the build finished after {build_duration_secs} seconds"
|
|
747
|
+
)
|
|
748
|
+
comment_body = generate_build_finished_pr_comment(
|
|
749
|
+
benchmark_stream_ids,
|
|
750
|
+
commit_datetime,
|
|
751
|
+
commit_summary,
|
|
752
|
+
git_branch,
|
|
753
|
+
git_hash,
|
|
754
|
+
tests_groups_regexp,
|
|
755
|
+
tests_priority_lower_limit,
|
|
756
|
+
tests_priority_upper_limit,
|
|
757
|
+
tests_regexp,
|
|
758
|
+
build_start_datetime,
|
|
759
|
+
build_duration_secs,
|
|
760
|
+
)
|
|
761
|
+
if contains_regression_comment:
|
|
762
|
+
update_comment_if_needed(
|
|
763
|
+
auto_approve_github_comments,
|
|
764
|
+
comment_body,
|
|
765
|
+
old_regression_comment_body,
|
|
766
|
+
regression_comment,
|
|
767
|
+
verbose,
|
|
768
|
+
)
|
|
769
|
+
else:
|
|
770
|
+
create_new_pr_comment(
|
|
771
|
+
auto_approve_github_comments,
|
|
772
|
+
comment_body,
|
|
773
|
+
github_pr,
|
|
774
|
+
pr_link,
|
|
775
|
+
)
|
|
776
|
+
shutil.rmtree(temporary_dir, ignore_errors=True)
|
|
777
|
+
new_builds_count = new_builds_count + 1
|
|
778
|
+
build_stream_fields_arr.append(build_stream_fields)
|
|
779
|
+
ack_reply = conn.xack(
|
|
780
|
+
STREAM_KEYNAME_GH_EVENTS_COMMIT,
|
|
781
|
+
STREAM_GH_EVENTS_COMMIT_BUILDERS_CG,
|
|
782
|
+
streamId,
|
|
783
|
+
)
|
|
784
|
+
if type(ack_reply) == bytes:
|
|
785
|
+
ack_reply = ack_reply.decode()
|
|
786
|
+
if ack_reply == "1" or ack_reply == 1:
|
|
787
|
+
logging.info(
|
|
788
|
+
"Sucessfully acknowledge build variation stream with id {}.".format(
|
|
789
|
+
streamId
|
|
790
|
+
)
|
|
791
|
+
)
|
|
792
|
+
else:
|
|
793
|
+
logging.error(
|
|
794
|
+
"Unable to acknowledge build variation stream with id {}. XACK reply {}".format(
|
|
795
|
+
streamId, ack_reply
|
|
796
|
+
)
|
|
797
|
+
)
|
|
798
|
+
else:
|
|
799
|
+
logging.error("Missing commit information within received message.")
|
|
800
|
+
return previous_id, new_builds_count, build_stream_fields_arr
|
|
801
|
+
|
|
802
|
+
|
|
803
|
+
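The hunk above derives a 16-hex-character build signature by SHA-256-hashing every build-affecting input (build config id, build command, env vars, compilers, build image, OS, arch, and artifact list) and embeds it in the Redis key under which each zipped artifact is cached; when every key already exists, the build step is skipped and the cached binaries are reused. The following is a minimal standalone sketch of that key scheme, not part of the package: the helper name `build_artifact_key` and the sample values are illustrative only.

    import hashlib

    def build_artifact_key(prefix, build_config_id, signature_parts, artifact):
        # Same scheme as the hunk above: sha256 over ":"-joined parts, truncated to 16 hex chars.
        build_signature = hashlib.sha256(":".join(signature_parts).encode()).hexdigest()[:16]
        return f"zipped:artifacts:{prefix}:{build_config_id}:{build_signature}:{artifact}.zip"

    # Identical inputs always map to the same key, so a plain EXISTS check on this key
    # tells the builder whether it can skip compilation and reuse the stored binary.
    parts = ["gcc-example", "sh -c 'make -j'", "", "gcc", "g++",
             "gcc:8.5.0-buster", "debian-buster", "amd64", "redis-server"]
    print(build_artifact_key("github_org=redis/github_repo=redis/git_branch=unstable",
                             "gcc-example", parts, "redis-server"))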
+def store_airgap_image_redis(conn, docker_client, run_image):
+    airgap_key = "docker:air-gap:{}".format(run_image)
+    logging.info(
+        "DOCKER AIR GAP: storing run image named: {} in redis key {}".format(
+            run_image, airgap_key
+        )
+    )
+    # 7 days expire
+    binary_exp_secs = 24 * 60 * 60 * 7
+    if conn.exists(airgap_key):
+        logging.info(
+            f"DOCKER AIRGAP KEY ALREADY EXISTS: {airgap_key}. Updating only the expire time"
+        )
+        conn.expire(airgap_key, binary_exp_secs)
+    else:
+        run_image_binary_stream = io.BytesIO()
+        run_image_docker = docker_client.images.get(run_image)
+        for chunk in run_image_docker.save():
+            run_image_binary_stream.write(chunk)
+        res_airgap = conn.set(
+            airgap_key,
+            run_image_binary_stream.getbuffer(),
+            ex=binary_exp_secs,
+        )
+        logging.info(
+            "DOCKER AIR GAP: result of set bin data to {}: {}".format(
+                airgap_key, res_airgap
+            )
+        )
+
+
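`store_airgap_image_redis` serializes the full `docker save` tarball of the run image into a single Redis string under `docker:air-gap:<run_image>` with a 7-day TTL, so runners without registry access can fetch the image from Redis instead of Docker Hub. Below is a sketch of the consuming side, assuming docker-py and redis-py clients; the function name `load_airgap_image` is illustrative only (the real consumer lives in the self-contained coordinator).

    import docker
    import redis

    def load_airgap_image(conn, docker_client, run_image):
        # Key written by store_airgap_image_redis() above.
        airgap_key = "docker:air-gap:{}".format(run_image)
        image_tar_bytes = conn.get(airgap_key)  # full `docker save` tarball, or None if expired
        if image_tar_bytes is None:
            return None
        # docker-py accepts the raw tarball bytes and returns the loaded Image objects.
        images = docker_client.images.load(image_tar_bytes)
        return images[0] if images else None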
+def generate_benchmark_stream_request(
+    id,
+    conn,
+    run_image,
+    build_arch,
+    testDetails,
+    build_os,
+    build_artifacts=[],
+    build_command=None,
+    build_config_metadata=None,
+    build_image=None,
+    build_vars_str=None,
+    compiler=None,
+    cpp_compiler=None,
+    git_branch=None,
+    git_hash=None,
+    git_timestamp_ms=None,
+    git_version=None,
+    pull_request=None,
+    redis_temporary_dir=None,
+    tests_groups_regexp=".*",
+    tests_priority_lower_limit=0,
+    tests_priority_upper_limit=10000,
+    tests_regexp=".*",
+    command_regexp=".*",
+    use_git_timestamp=False,
+    server_name="redis",
+    github_org="redis",
+    github_repo="redis",
+    existing_artifact_keys=None,
+):
+    build_stream_fields = {
+        "id": id,
+        "use_git_timestamp": str(use_git_timestamp),
+        "run_image": run_image,
+        "os": build_os,
+        "arch": build_arch,
+        "build_artifacts": ",".join(build_artifacts),
+        "tests_regexp": tests_regexp,
+        "tests_priority_upper_limit": tests_priority_upper_limit,
+        "tests_priority_lower_limit": tests_priority_lower_limit,
+        "tests_groups_regexp": tests_groups_regexp,
+        "command_regexp": command_regexp,
+        "server_name": server_name,
+        "github_org": github_org,
+        "github_repo": github_repo,
+    }
+    if build_config_metadata is not None:
+        build_stream_fields["metadata"] = json.dumps(build_config_metadata)
+    if compiler is not None:
+        build_stream_fields["compiler"] = compiler
+    if cpp_compiler is not None:
+        build_stream_fields["cpp_compiler"] = cpp_compiler
+    if build_vars_str is not None:
+        build_stream_fields["build_vars"] = build_vars_str
+    if build_command is not None:
+        logging.info(f"adding build_command: {build_command}")
+        build_stream_fields["build_command"] = build_command
+    if build_image is not None:
+        build_stream_fields["build_image"] = build_image
+    else:
+        build_stream_fields["build_image"] = run_image
+    if git_hash is not None:
+        build_stream_fields["git_hash"] = git_hash
+    if pull_request is not None:
+        build_stream_fields["pull_request"] = pull_request
+    if git_branch is not None:
+        build_stream_fields["git_branch"] = git_branch
+    if git_version is not None:
+        build_stream_fields["git_version"] = git_version
+    if git_timestamp_ms is not None:
+        build_stream_fields["git_timestamp_ms"] = git_timestamp_ms
+
+    if existing_artifact_keys is not None:
+        # Use existing artifact keys (for reuse case)
+        for artifact in build_artifacts:
+            bin_key = existing_artifact_keys[artifact]
+            build_stream_fields[artifact] = bin_key
+            # Get the length from the existing artifact
+            bin_artifact_len = conn.strlen(bin_key)
+            build_stream_fields["{}_len_bytes".format(artifact)] = bin_artifact_len
+    else:
+        # Build new artifacts and store them
+        prefix = f"github_org={github_org}/github_repo={github_repo}/git_branch={str(git_branch)}/git_version={str(git_version)}/git_hash={str(git_hash)}"
+
+        # Create build signature for new artifacts
+        import hashlib
+
+        build_signature_parts = [
+            str(id),  # build config ID
+            str(build_command),  # build command
+            str(build_vars_str),  # environment variables
+            str(compiler),  # compiler
+            str(cpp_compiler),  # C++ compiler
+            str(build_image),  # build image
+            str(build_os),  # OS
+            str(build_arch),  # architecture
+            ",".join(sorted(build_artifacts)),  # artifacts list
+        ]
+        build_signature = hashlib.sha256(
+            ":".join(build_signature_parts).encode()
+        ).hexdigest()[:16]
+
+        for artifact in build_artifacts:
+            bin_key = f"zipped:artifacts:{prefix}:{id}:{build_signature}:{artifact}.zip"
+            if artifact == "redisearch.so":
+                bin_artifact = open(
+                    f"{redis_temporary_dir}modules/redisearch/src/bin/linux-x64-release/search-community/{artifact}",
+                    "rb",
+                ).read()
+            else:
+                bin_artifact = open(f"{redis_temporary_dir}src/{artifact}", "rb").read()
+            bin_artifact_len = len(bytes(bin_artifact))
+            assert bin_artifact_len > 0
+            conn.set(bin_key, bytes(bin_artifact), ex=REDIS_BINS_EXPIRE_SECS)
+            build_stream_fields[artifact] = bin_key
+            build_stream_fields["{}_len_bytes".format(artifact)] = bin_artifact_len
+    result = True
+    if b"platform" in testDetails:
+        build_stream_fields["platform"] = testDetails[b"platform"]
+    return build_stream_fields, result
+
+
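`generate_benchmark_stream_request` returns a flat field dict (config id, regex filters, git metadata, plus one `<artifact>` key and one `<artifact>_len_bytes` length per binary) that the builder XADDs to an architecture-specific stream; a consumer can read an entry back and fetch each binary by the stored key. A minimal read-side sketch, assuming a locally reachable Redis and a placeholder stream name (the real name comes from `get_arch_specific_stream_name`):

    import redis

    conn = redis.Redis(host="localhost", port=6379)
    stream = "benchmark_builds:x86_64"  # placeholder stream name

    for entry_id, fields in conn.xrange(stream, count=1):
        git_hash = fields.get(b"git_hash", b"").decode()
        artifact_key = fields.get(b"redis-server")  # e.g. b"zipped:artifacts:...:redis-server.zip"
        if artifact_key:
            binary = conn.get(artifact_key)
            expected_len = int(fields.get(b"redis-server_len_bytes", b"0"))
            print(entry_id, git_hash, len(binary or b""), expected_len)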
+def build_spec_image_prefetch(builders_folder, different_build_specs):
+    logging.info("checking build spec requirements")
+    already_checked_images = []
+    hub_pulled_images = 0
+    client = docker.from_env()
+    for build_spec in different_build_specs:
+        build_config, id = get_build_config(builders_folder + "/" + build_spec)
+        if build_config["kind"] == "docker":
+            build_image = build_config["build_image"]
+            hub_pulled_images = check_docker_image_available(
+                already_checked_images, build_image, client, hub_pulled_images, id
+            )
+            if "run_image" in build_config:
+                run_image = build_config["run_image"]
+                hub_pulled_images = check_docker_image_available(
+                    already_checked_images, run_image, client, hub_pulled_images, id
+                )
+    return already_checked_images, hub_pulled_images
+
+
+def check_docker_image_available(
+    already_checked_images, build_image, client, hub_pulled_images, id
+):
+    if build_image not in already_checked_images:
+        logging.info(
+            "Build {} requirement: checking docker image {} is available.".format(
+                id, build_image
+            )
+        )
+        local_images = [
+            x.tags[0] for x in client.images.list(filters={"reference": build_image})
+        ]
+        if build_image not in local_images:
+            logging.info(
+                "Build {} requirement: docker image {} is not available locally. Fetching it from hub".format(
+                    id, build_image
+                )
+            )
+            client.images.pull(build_image)
+            hub_pulled_images = hub_pulled_images + 1
+        else:
+            logging.info(
+                "Build {} requirement: docker image {} is available locally.".format(
+                    id, build_image
+                )
+            )
+        already_checked_images.append(build_image)
+    else:
+        logging.info(
+            "Build {} requirement: docker image {} availability was already checked.".format(
+                id, build_image
+            )
+        )
+    return hub_pulled_images