gitlab_quality-test_tooling 3.14.0 → 3.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Gemfile.lock +1 -1
- data/exe/test-coverage +31 -0
- data/lib/gitlab_quality/test_tooling/code_coverage/click_house/client.rb +29 -0
- data/lib/gitlab_quality/test_tooling/code_coverage/click_house/per_test_coverage_table.rb +169 -0
- data/lib/gitlab_quality/test_tooling/code_coverage/click_house/table.rb +7 -12
- data/lib/gitlab_quality/test_tooling/code_coverage/click_house/test_health_risk_aggregation.sql +123 -0
- data/lib/gitlab_quality/test_tooling/code_coverage/click_house/test_health_risk_aggregator.rb +114 -0
- data/lib/gitlab_quality/test_tooling/code_coverage/per_test_coverage_data.rb +174 -0
- data/lib/gitlab_quality/test_tooling/version.rb +1 -1
- metadata +7 -2
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: c8b4293811154f61f07fd8e35523a2406ba9832c6fe3e5eb3cf12164688df97a
|
|
4
|
+
data.tar.gz: c31082d6308fabe29ec51b4be09ebfd5d4c1d84e61ae8b3ce3741ffadcace9a6
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: 1e522f9897eeddf1857caa10470ab1e11ae1ad7bad5062af80dbabdfb80e0fd11170f2011a2c28e9fcf5bb59394ca5124d9a88bedca637fbaec2e2fbf462503c
|
|
7
|
+
data.tar.gz: 1bb96de8d84feb07694145507df76707d88906b0671887b8104b79c3105859dda2c1d9ca6b68e0344478b955cdf91ec5f459ad6f1e48ca3612653b233c214531
|
data/Gemfile.lock
CHANGED
data/exe/test-coverage
CHANGED
|
@@ -10,13 +10,16 @@ require_relative "../lib/gitlab_quality/test_tooling"
|
|
|
10
10
|
require_relative '../lib/gitlab_quality/test_tooling/code_coverage/category_owners'
|
|
11
11
|
require_relative '../lib/gitlab_quality/test_tooling/code_coverage/click_house/category_owners_table'
|
|
12
12
|
require_relative '../lib/gitlab_quality/test_tooling/code_coverage/click_house/coverage_metrics_table'
|
|
13
|
+
require_relative '../lib/gitlab_quality/test_tooling/code_coverage/click_house/per_test_coverage_table'
|
|
13
14
|
require_relative '../lib/gitlab_quality/test_tooling/code_coverage/click_house/test_file_mappings_table'
|
|
15
|
+
require_relative '../lib/gitlab_quality/test_tooling/code_coverage/click_house/test_health_risk_aggregator'
|
|
14
16
|
require_relative '../lib/gitlab_quality/test_tooling/code_coverage/coverage_data'
|
|
15
17
|
require_relative '../lib/gitlab_quality/test_tooling/code_coverage/lcov_file'
|
|
16
18
|
require_relative '../lib/gitlab_quality/test_tooling/code_coverage/artifacts'
|
|
17
19
|
require_relative '../lib/gitlab_quality/test_tooling/code_coverage/test_report'
|
|
18
20
|
require_relative '../lib/gitlab_quality/test_tooling/code_coverage/test_map'
|
|
19
21
|
require_relative '../lib/gitlab_quality/test_tooling/code_coverage/test_file_mapping_data'
|
|
22
|
+
require_relative '../lib/gitlab_quality/test_tooling/code_coverage/per_test_coverage_data'
|
|
20
23
|
require_relative '../lib/gitlab_quality/test_tooling/code_coverage/source_file_classifier'
|
|
21
24
|
require_relative '../lib/gitlab_quality/test_tooling/code_coverage/responsibility_classifier'
|
|
22
25
|
require_relative '../lib/gitlab_quality/test_tooling/code_coverage/responsibility_patterns_config'
|
|
@@ -63,6 +66,13 @@ options = OptionParser.new do |opts|
|
|
|
63
66
|
params[:responsibility_patterns] = path
|
|
64
67
|
end
|
|
65
68
|
|
|
69
|
+
opts.on('--per-test-coverage GLOB',
|
|
70
|
+
'Optional. Glob pattern for per-test coverage JSON files. ' \
|
|
71
|
+
'When provided, populates code_coverage.test_coverage_per_file and runs the ' \
|
|
72
|
+
'daily test_health_risk aggregation. (e.g., "tmp/per-test-coverage-*.json")') do |pattern|
|
|
73
|
+
params[:per_test_coverage] = pattern
|
|
74
|
+
end
|
|
75
|
+
|
|
66
76
|
opts.separator ""
|
|
67
77
|
opts.separator "Environment variables:"
|
|
68
78
|
opts.separator " GLCI_CLICKHOUSE_METRICS_PASSWORD ClickHouse password (required, not passed via CLI for security)"
|
|
@@ -192,6 +202,27 @@ if params.any? && (required_params - params.keys).none?
|
|
|
192
202
|
)
|
|
193
203
|
test_file_mappings_table = GitlabQuality::TestTooling::CodeCoverage::ClickHouse::TestFileMappingsTable.new(**shared_clickhouse_data)
|
|
194
204
|
test_file_mappings_table.push(test_file_mapping_data.as_db_table)
|
|
205
|
+
|
|
206
|
+
# Per-test coverage export (optional). Only runs when --per-test-coverage
|
|
207
|
+
# was provided AND at least one matching artifact exists.
|
|
208
|
+
if params[:per_test_coverage]
|
|
209
|
+
per_test_files = Dir.glob(params[:per_test_coverage])
|
|
210
|
+
if per_test_files.any?
|
|
211
|
+
per_test_data = GitlabQuality::TestTooling::CodeCoverage::PerTestCoverageData.new(
|
|
212
|
+
per_test_files,
|
|
213
|
+
tests_to_categories: tests_to_categories,
|
|
214
|
+
feature_categories_to_teams: category_owners.feature_categories_to_teams,
|
|
215
|
+
captured_sha: ENV.fetch('CI_COMMIT_SHA', '')
|
|
216
|
+
)
|
|
217
|
+
per_test_coverage_table = GitlabQuality::TestTooling::CodeCoverage::ClickHouse::PerTestCoverageTable.new(**clickhouse_data)
|
|
218
|
+
per_test_coverage_table.push(per_test_data.as_db_table)
|
|
219
|
+
|
|
220
|
+
aggregator = GitlabQuality::TestTooling::CodeCoverage::ClickHouse::TestHealthRiskAggregator.new(**clickhouse_data)
|
|
221
|
+
aggregator.run
|
|
222
|
+
else
|
|
223
|
+
puts "No per-test coverage artifacts matched #{params[:per_test_coverage]}; skipping per-test export and aggregation."
|
|
224
|
+
end
|
|
225
|
+
end
|
|
195
226
|
else
|
|
196
227
|
puts "Missing argument(s). Required arguments are: #{required_params}\nPassed arguments are: #{params}\n"
|
|
197
228
|
puts options
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module GitlabQuality
|
|
4
|
+
module TestTooling
|
|
5
|
+
module CodeCoverage
|
|
6
|
+
module ClickHouse
|
|
7
|
+
# Memoized ClickHouse client accessor shared by `Table` and
|
|
8
|
+
# `TestHealthRiskAggregator`. Both classes need the same client
|
|
9
|
+
# construction from `@url` / `@database` / `@username` / `@password` /
|
|
10
|
+
# `@logger` instance variables; this module factors out the duplicated
|
|
11
|
+
# accessor without forcing one class to inherit from the other.
|
|
12
|
+
module Client
|
|
13
|
+
private
|
|
14
|
+
|
|
15
|
+
# @return [GitlabQuality::TestTooling::ClickHouse::Client]
|
|
16
|
+
def client
|
|
17
|
+
@client ||= GitlabQuality::TestTooling::ClickHouse::Client.new(
|
|
18
|
+
url: url,
|
|
19
|
+
database: database,
|
|
20
|
+
username: username,
|
|
21
|
+
password: password,
|
|
22
|
+
logger: logger
|
|
23
|
+
)
|
|
24
|
+
end
|
|
25
|
+
end
|
|
26
|
+
end
|
|
27
|
+
end
|
|
28
|
+
end
|
|
29
|
+
end
|
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require_relative 'table'
|
|
4
|
+
|
|
5
|
+
module GitlabQuality
|
|
6
|
+
module TestTooling
|
|
7
|
+
module CodeCoverage
|
|
8
|
+
module ClickHouse
|
|
9
|
+
# Inserts per-test, per-source-file line-set coverage rows into
|
|
10
|
+
# `code_coverage.test_coverage_per_file`. The `covered_lines` column is
|
|
11
|
+
# `AggregateFunction(groupBitmap, UInt32)` so JSONEachRow can't carry it;
|
|
12
|
+
# rows go in via raw `INSERT ... VALUES` statements wrapping
|
|
13
|
+
# `bitmapBuild(CAST([line, ...] AS Array(UInt32)))` per row.
|
|
14
|
+
#
|
|
15
|
+
# Dedup across runs is handled by the table's
|
|
16
|
+
# `SharedReplacingMergeTree(version)` engine on
|
|
17
|
+
# `(ci_project_path, test_file, source_file)` ORDER BY. Within a single
|
|
18
|
+
# run, callers must pre-aggregate at the (test_file, source_file) grain
|
|
19
|
+
# before pushing: multiple examples within the same test_file should be
|
|
20
|
+
# unioned into one row by the loader, not handed in as duplicates.
|
|
21
|
+
class PerTestCoverageTable < GitlabQuality::TestTooling::CodeCoverage::ClickHouse::Table
|
|
22
|
+
TABLE_NAME = "test_coverage_per_file"
|
|
23
|
+
BATCH_SIZE = 500
|
|
24
|
+
# Intentionally generous ceiling on line numbers. Real source files
|
|
25
|
+
# are thousands of lines; generated artifacts (large GraphQL schemas,
|
|
26
|
+
# bundled JS, JSON manifests) can run past 100k. The cap is set to
|
|
27
|
+
# flag clearly bogus values (negative, garbage casts, anything past
|
|
28
|
+
# ~1M) without rejecting realistic generated files. ClickHouse's
|
|
29
|
+
# UInt32 ceiling is ~4.3B, so we still have orders of magnitude of
|
|
30
|
+
# headroom above this. Tighten only with evidence.
|
|
31
|
+
MAX_LINE_NUMBER = 1_000_000
|
|
32
|
+
|
|
33
|
+
# @param data [Array<Hash>] one entry per (test_file, source_file). Each entry needs:
|
|
34
|
+
# :test_file [String]
|
|
35
|
+
# :source_file [String]
|
|
36
|
+
# :covered_lines [Array<Integer>] line numbers covered by this test on this file
|
|
37
|
+
# :total_lines [Integer] executable lines in the source file
|
|
38
|
+
# :feature_category, :group, :stage, :section [String, optional]
|
|
39
|
+
# @return [void]
|
|
40
|
+
def push(data) # rubocop:disable Metrics/AbcSize
|
|
41
|
+
return logger.warn("#{LOG_PREFIX} No data found, skipping ClickHouse export!") if data.empty?
|
|
42
|
+
|
|
43
|
+
logger.debug("#{LOG_PREFIX} Starting per-test coverage export to ClickHouse")
|
|
44
|
+
sanitized_data = sanitize(data)
|
|
45
|
+
|
|
46
|
+
return logger.warn("#{LOG_PREFIX} No valid data found after sanitization, skipping ClickHouse export!") if sanitized_data.empty?
|
|
47
|
+
|
|
48
|
+
total_batches = (sanitized_data.size.to_f / BATCH_SIZE).ceil
|
|
49
|
+
sanitized_data.each_slice(BATCH_SIZE).with_index do |batch, index|
|
|
50
|
+
logger.debug("#{LOG_PREFIX} Pushing batch #{index + 1} of #{total_batches} (#{batch.size} rows)")
|
|
51
|
+
client.query(build_insert_sql(batch), format: "TabSeparated")
|
|
52
|
+
end
|
|
53
|
+
logger.info("#{LOG_PREFIX} Successfully pushed #{sanitized_data.size} records to #{full_table_name}")
|
|
54
|
+
rescue StandardError => e
|
|
55
|
+
logger.error("#{LOG_PREFIX} Error occurred while pushing data to #{full_table_name}: #{e.message}")
|
|
56
|
+
raise
|
|
57
|
+
end
|
|
58
|
+
|
|
59
|
+
private
|
|
60
|
+
|
|
61
|
+
def valid_record?(record)
|
|
62
|
+
valid_test_file?(record) && valid_source_file?(record) && valid_covered_lines?(record)
|
|
63
|
+
end
|
|
64
|
+
|
|
65
|
+
def valid_test_file?(record)
|
|
66
|
+
return true unless record[:test_file].blank?
|
|
67
|
+
|
|
68
|
+
logger.warn("#{LOG_PREFIX} Skipping record with nil/empty test_file: #{record}")
|
|
69
|
+
false
|
|
70
|
+
end
|
|
71
|
+
|
|
72
|
+
def valid_source_file?(record)
|
|
73
|
+
return true unless record[:source_file].blank?
|
|
74
|
+
|
|
75
|
+
logger.warn("#{LOG_PREFIX} Skipping record with nil/empty source_file: #{record}")
|
|
76
|
+
false
|
|
77
|
+
end
|
|
78
|
+
|
|
79
|
+
def valid_covered_lines?(record)
|
|
80
|
+
covered = record[:covered_lines]
|
|
81
|
+
return true if covered.is_a?(Array) && !covered.empty?
|
|
82
|
+
|
|
83
|
+
logger.warn("#{LOG_PREFIX} Skipping record with empty/invalid covered_lines: #{record[:test_file]} → #{record[:source_file]}")
|
|
84
|
+
false
|
|
85
|
+
end
|
|
86
|
+
|
|
87
|
+
def sanitized_data_record(record) # rubocop:disable Metrics/AbcSize
|
|
88
|
+
sanitized_lines = sanitize_lines(record[:covered_lines])
|
|
89
|
+
|
|
90
|
+
# `valid_covered_lines?` only checks the raw input is a non-empty
|
|
91
|
+
# Array. Post-sanitisation, every entry could still be filtered
|
|
92
|
+
# out (negatives, zeros, values past MAX_LINE_NUMBER). An empty
|
|
93
|
+
# `bitmapBuild([])` row carries no useful signal for the
|
|
94
|
+
# aggregation and just wastes a tuple, so drop the record here.
|
|
95
|
+
if sanitized_lines.empty?
|
|
96
|
+
logger.warn(
|
|
97
|
+
"#{LOG_PREFIX} Skipping record whose covered_lines emptied after sanitisation: " \
|
|
98
|
+
"#{record[:test_file]} → #{record[:source_file]}"
|
|
99
|
+
)
|
|
100
|
+
return nil
|
|
101
|
+
end
|
|
102
|
+
|
|
103
|
+
{
|
|
104
|
+
timestamp: time,
|
|
105
|
+
ci_project_path: ENV.fetch('CI_PROJECT_PATH', nil),
|
|
106
|
+
test_file: record[:test_file],
|
|
107
|
+
source_file: record[:source_file],
|
|
108
|
+
covered_lines: sanitized_lines,
|
|
109
|
+
total_lines: record[:total_lines].to_i,
|
|
110
|
+
feature_category: record[:feature_category] || '',
|
|
111
|
+
group: record[:group] || '',
|
|
112
|
+
stage: record[:stage] || '',
|
|
113
|
+
section: record[:section] || '',
|
|
114
|
+
captured_sha: record[:captured_sha].to_s
|
|
115
|
+
}
|
|
116
|
+
end
|
|
117
|
+
|
|
118
|
+
def build_insert_sql(batch)
|
|
119
|
+
rows_sql = batch.map { |record| build_row_sql(record) }.join(",\n")
|
|
120
|
+
<<~SQL
|
|
121
|
+
INSERT INTO #{full_table_name}
|
|
122
|
+
(timestamp, ci_project_path, test_file, source_file, covered_lines, total_lines, feature_category, `group`, stage, section, captured_sha)
|
|
123
|
+
VALUES
|
|
124
|
+
#{rows_sql}
|
|
125
|
+
SQL
|
|
126
|
+
end
|
|
127
|
+
|
|
128
|
+
# Precondition: `record[:covered_lines]` is the sanitised integer
|
|
129
|
+
# array produced upstream by `sanitize_lines` (via
|
|
130
|
+
# `sanitized_data_record`). Values are positive integers within
|
|
131
|
+
# MAX_LINE_NUMBER; no defensive validation is repeated here because
|
|
132
|
+
# this method is on the hot path (every row in every batch).
|
|
133
|
+
def build_row_sql(record) # rubocop:disable Metrics/AbcSize
|
|
134
|
+
line_array = "[#{record[:covered_lines].join(',')}]"
|
|
135
|
+
timestamp_str = record[:timestamp].iso8601(3)
|
|
136
|
+
"(" \
|
|
137
|
+
"'#{timestamp_str}', " \
|
|
138
|
+
"'#{escape(record[:ci_project_path])}', " \
|
|
139
|
+
"'#{escape(record[:test_file])}', " \
|
|
140
|
+
"'#{escape(record[:source_file])}', " \
|
|
141
|
+
"bitmapBuild(CAST(#{line_array} AS Array(UInt32))), " \
|
|
142
|
+
"#{record[:total_lines]}, " \
|
|
143
|
+
"'#{escape(record[:feature_category])}', " \
|
|
144
|
+
"'#{escape(record[:group])}', " \
|
|
145
|
+
"'#{escape(record[:stage])}', " \
|
|
146
|
+
"'#{escape(record[:section])}', " \
|
|
147
|
+
"'#{escape(record[:captured_sha])}'" \
|
|
148
|
+
")"
|
|
149
|
+
end
|
|
150
|
+
|
|
151
|
+
# Filter line numbers down to positive integers within MAX_LINE_NUMBER.
|
|
152
|
+
# Drops anything that isn't a valid line number; doesn't raise so a
|
|
153
|
+
# single bad row doesn't fail the whole batch.
|
|
154
|
+
def sanitize_lines(lines)
|
|
155
|
+
Array(lines).filter_map do |line|
|
|
156
|
+
n = line.to_i
|
|
157
|
+
n if n.positive? && n <= MAX_LINE_NUMBER
|
|
158
|
+
end
|
|
159
|
+
end
|
|
160
|
+
|
|
161
|
+
# ClickHouse string escape: backslash and single quote.
|
|
162
|
+
def escape(str)
|
|
163
|
+
str.to_s.gsub(/\\/, '\\\\\\\\').gsub("'", "''") # rubocop:disable Style/RedundantRegexpArgument
|
|
164
|
+
end
|
|
165
|
+
end
|
|
166
|
+
end
|
|
167
|
+
end
|
|
168
|
+
end
|
|
169
|
+
end
|
|
@@ -2,12 +2,18 @@
|
|
|
2
2
|
|
|
3
3
|
require 'time'
|
|
4
4
|
|
|
5
|
+
require_relative 'client'
|
|
6
|
+
|
|
5
7
|
module GitlabQuality
|
|
6
8
|
module TestTooling
|
|
7
9
|
module CodeCoverage
|
|
8
10
|
module ClickHouse
|
|
11
|
+
# Shared log prefix for all classes in this namespace. Hoisted up from
|
|
12
|
+
# individual classes so the prefix can change in one place.
|
|
13
|
+
LOG_PREFIX = "[CodeCoverage]" unless defined?(LOG_PREFIX)
|
|
14
|
+
|
|
9
15
|
class Table
|
|
10
|
-
|
|
16
|
+
include Client
|
|
11
17
|
|
|
12
18
|
def initialize(url:, database:, username: nil, password: nil, logger: nil)
|
|
13
19
|
@url = url
|
|
@@ -74,17 +80,6 @@ module GitlabQuality
|
|
|
74
80
|
logger.warn("#{LOG_PREFIX} Invalid CI_PIPELINE_CREATED_AT format: #{ci_created_at}, using current time")
|
|
75
81
|
Time.now.utc
|
|
76
82
|
end
|
|
77
|
-
|
|
78
|
-
# @return [GitlabQuality::TestTooling::ClickHouse::Client]
|
|
79
|
-
def client
|
|
80
|
-
@client ||= GitlabQuality::TestTooling::ClickHouse::Client.new(
|
|
81
|
-
url: url,
|
|
82
|
-
database: database,
|
|
83
|
-
username: username,
|
|
84
|
-
password: password,
|
|
85
|
-
logger: logger
|
|
86
|
-
)
|
|
87
|
-
end
|
|
88
83
|
end
|
|
89
84
|
end
|
|
90
85
|
end
|
data/lib/gitlab_quality/test_tooling/code_coverage/click_house/test_health_risk_aggregation.sql
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
-- Daily aggregation of per-test coverage data into per-group risk summary.
|
|
2
|
+
--
|
|
3
|
+
-- Reads the bitmap line sets from `code_coverage.test_coverage_per_file`,
|
|
4
|
+
-- joins per-test quarantine and flaky status from `test_metrics`, then
|
|
5
|
+
-- computes for each source file:
|
|
6
|
+
-- quarantined_union = union of line sets covered by *quarantined* tests
|
|
7
|
+
-- flaky_union = union of line sets covered by *flaky* tests
|
|
8
|
+
-- quarantined_or_flaky_union = union of line sets covered by *quarantined OR flaky* tests
|
|
9
|
+
-- healthy_union = union of line sets covered by tests that are neither
|
|
10
|
+
--
|
|
11
|
+
-- The "at risk" line counts are:
|
|
12
|
+
-- at_risk_lines_quarantine = lines uniquely covered by quarantined tests
|
|
13
|
+
-- (not covered by any healthy test)
|
|
14
|
+
-- at_risk_lines_flaky = lines uniquely covered by flaky tests
|
|
15
|
+
-- at_risk_lines_combined = lines uniquely covered by quarantined OR flaky
|
|
16
|
+
--
|
|
17
|
+
-- Healthy union excludes flaky tests too, by design: a flaky test isn't a
|
|
18
|
+
-- reliable coverage safety net, so its lines shouldn't count toward the
|
|
19
|
+
-- baseline against which quarantine-only risk is measured.
|
|
20
|
+
--
|
|
21
|
+
-- Result: one row per (snapshot_date, group, stage, section) tuple inserted
|
|
22
|
+
-- into `code_coverage.test_health_risk_per_group`.
|
|
23
|
+
--
|
|
24
|
+
-- Parameter substitution: this template uses `{name}` braces (not %{name} or
|
|
25
|
+
-- :name). They're replaced by literal text via `gsub` in
|
|
26
|
+
-- TestHealthRiskAggregator#build_sql, after each parameter passes a regex
|
|
27
|
+
-- check. A typoed placeholder name will silently fall through gsub as a
|
|
28
|
+
-- literal — match the names exactly.
|
|
29
|
+
--
|
|
30
|
+
-- {snapshot_date} : the date stamp for this run, e.g. '2026-05-07'
|
|
31
|
+
-- {coverage_window} : interval to look back for fresh per-test rows,
|
|
32
|
+
-- default '2 DAY' (see Resilience note below)
|
|
33
|
+
-- {risk_window} : interval to consider quarantine/flaky status from
|
|
34
|
+
-- the test_metrics summaries, default '30 DAY'
|
|
35
|
+
|
|
36
|
+
-- Idempotency: this INSERT is unconditional. The target table must use
|
|
37
|
+
-- SharedReplacingMergeTree(version) (or ReplacingMergeTree on non-Cloud
|
|
38
|
+
-- ClickHouse; Cloud silently rewrites to Shared...) keyed by
|
|
39
|
+
-- (snapshot_date, group, stage, section), with
|
|
40
|
+
-- `version UInt64 MATERIALIZED toUnixTimestamp64Milli(now64(3))`, so re-running
|
|
41
|
+
-- for the same snapshot_date produces a higher version that replaces the
|
|
42
|
+
-- previous row on merge. Without that engine, re-runs would duplicate rows.
|
|
43
|
+
--
|
|
44
|
+
-- Resilience: `{coverage_window}` defaults to 2 DAY (see
|
|
45
|
+
-- TestHealthRiskAggregator::DEFAULT_COVERAGE_WINDOW). One missed nightly
|
|
46
|
+
-- run is recovered on the next night because the aggregation still sees
|
|
47
|
+
-- the previous day's per-test rows. Cross-run race: if two jobs land
|
|
48
|
+
-- inserts overlapping with the aggregation, the later aggregation wins
|
|
49
|
+
-- because of the version-based replacement.
|
|
50
|
+
INSERT INTO code_coverage.test_health_risk_per_group
|
|
51
|
+
WITH
|
|
52
|
+
quarantine_status AS (
|
|
53
|
+
-- `date` is the daily aggregation timestamp on the test_metrics summary
|
|
54
|
+
-- view (when the per-test counts were rolled up), not the date a test
|
|
55
|
+
-- entered quarantine. We treat any test marked quarantined within the
|
|
56
|
+
-- risk window as currently quarantined.
|
|
57
|
+
SELECT
|
|
58
|
+
test_file,
|
|
59
|
+
uniqIfMerge(quarantined_cases) > 0 AS is_quarantined
|
|
60
|
+
FROM test_metrics.test_file_quarantine_summary
|
|
61
|
+
WHERE date >= now() - INTERVAL {risk_window}
|
|
62
|
+
GROUP BY test_file
|
|
63
|
+
HAVING is_quarantined
|
|
64
|
+
),
|
|
65
|
+
flaky_status AS (
|
|
66
|
+
SELECT
|
|
67
|
+
test_file,
|
|
68
|
+
uniqIfMerge(flaky_cases) > 0 AS is_flaky
|
|
69
|
+
FROM test_metrics.test_file_flaky_summary
|
|
70
|
+
WHERE date >= now() - INTERVAL {risk_window}
|
|
71
|
+
GROUP BY test_file
|
|
72
|
+
HAVING is_flaky
|
|
73
|
+
),
|
|
74
|
+
per_test_status AS (
|
|
75
|
+
SELECT
|
|
76
|
+
tc.source_file,
|
|
77
|
+
tc.`group`,
|
|
78
|
+
tc.stage,
|
|
79
|
+
tc.section,
|
|
80
|
+
tc.total_lines,
|
|
81
|
+
tc.covered_lines,
|
|
82
|
+
coalesce(qs.is_quarantined, FALSE) AS is_quarantined,
|
|
83
|
+
coalesce(fs.is_flaky, FALSE) AS is_flaky
|
|
84
|
+
FROM code_coverage.test_coverage_per_file tc FINAL
|
|
85
|
+
LEFT JOIN quarantine_status qs ON qs.test_file = tc.test_file
|
|
86
|
+
LEFT JOIN flaky_status fs ON fs.test_file = tc.test_file
|
|
87
|
+
WHERE tc.timestamp >= now() - INTERVAL {coverage_window}
|
|
88
|
+
),
|
|
89
|
+
per_file AS (
|
|
90
|
+
SELECT
|
|
91
|
+
source_file,
|
|
92
|
+
`group`,
|
|
93
|
+
stage,
|
|
94
|
+
section,
|
|
95
|
+
max(total_lines) AS total_lines,
|
|
96
|
+
groupBitmapMergeStateIf(covered_lines, is_quarantined) AS quarantined_union,
|
|
97
|
+
groupBitmapMergeStateIf(covered_lines, is_flaky) AS flaky_union,
|
|
98
|
+
groupBitmapMergeStateIf(covered_lines, is_quarantined OR is_flaky) AS quarantined_or_flaky_union,
|
|
99
|
+
groupBitmapMergeStateIf(covered_lines, NOT is_quarantined AND NOT is_flaky) AS healthy_union
|
|
100
|
+
FROM per_test_status
|
|
101
|
+
GROUP BY source_file, `group`, stage, section
|
|
102
|
+
)
|
|
103
|
+
SELECT
|
|
104
|
+
toDate('{snapshot_date}') AS snapshot_date,
|
|
105
|
+
`group`,
|
|
106
|
+
stage,
|
|
107
|
+
section,
|
|
108
|
+
count(*) AS source_file_count,
|
|
109
|
+
-- at_risk_lines_combined ≤ at_risk_lines_quarantine + at_risk_lines_flaky.
|
|
110
|
+
-- A line covered by both a quarantined and a flaky test is single-counted
|
|
111
|
+
-- in quarantined_or_flaky_union, so the combined column is the *unique*
|
|
112
|
+
-- exclusive-line loss across both risk sources, not their sum.
|
|
113
|
+
sum(bitmapCardinality(bitmapAndnot(quarantined_union, healthy_union))) AS at_risk_lines_quarantine,
|
|
114
|
+
sum(bitmapCardinality(bitmapAndnot(flaky_union, healthy_union))) AS at_risk_lines_flaky,
|
|
115
|
+
sum(bitmapCardinality(bitmapAndnot(quarantined_or_flaky_union, healthy_union))) AS at_risk_lines_combined,
|
|
116
|
+
-- team_executable_lines is the per-team executable-line denominator across
|
|
117
|
+
-- every (source_file, team) row owned by the team. A source file owned by
|
|
118
|
+
-- tests in two different (group, stage, section) tuples contributes to each
|
|
119
|
+
-- team's total separately, so summing this column across teams is NOT the
|
|
120
|
+
-- codebase-wide line count.
|
|
121
|
+
sum(total_lines) AS team_executable_lines
|
|
122
|
+
FROM per_file
|
|
123
|
+
GROUP BY `group`, stage, section;
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require_relative 'client'
|
|
4
|
+
require_relative 'table'
|
|
5
|
+
|
|
6
|
+
module GitlabQuality
|
|
7
|
+
module TestTooling
|
|
8
|
+
module CodeCoverage
|
|
9
|
+
module ClickHouse
|
|
10
|
+
# Runs the daily aggregation that turns `code_coverage.test_coverage_per_file`
|
|
11
|
+
# rows into a small `code_coverage.test_health_risk_per_group` summary
|
|
12
|
+
# the dashboard reads.
|
|
13
|
+
#
|
|
14
|
+
# Hybrid model: this Ruby class is the orchestrator (schedule, error
|
|
15
|
+
# handling, parameter substitution); ClickHouse runs the bitmap math
|
|
16
|
+
# via `INSERT ... SELECT` from the SQL file shipped alongside.
|
|
17
|
+
class TestHealthRiskAggregator
|
|
18
|
+
include Client
|
|
19
|
+
|
|
20
|
+
SQL_FILE = File.expand_path('test_health_risk_aggregation.sql', __dir__)
|
|
21
|
+
|
|
22
|
+
# 2 DAY rather than 1 DAY makes the aggregation self-healing across
|
|
23
|
+
# a single missed nightly run: if last night's export failed, this
|
|
24
|
+
# night's run still sees yesterday's per-test rows and produces a
|
|
25
|
+
# current snapshot. ReplacingMergeTree FINAL on the source table
|
|
26
|
+
# ensures we read only the latest version per (test_file, source_file).
|
|
27
|
+
# ClickHouse accepts both '2 DAY' and '2 DAYS'; we use the singular
|
|
28
|
+
# form for consistency with `30 DAY` below.
|
|
29
|
+
DEFAULT_COVERAGE_WINDOW = '2 DAY'
|
|
30
|
+
DEFAULT_RISK_WINDOW = '30 DAY'
|
|
31
|
+
|
|
32
|
+
# `snapshot_date` is YYYY-MM-DD; intervals are `<integer> <unit>`
|
|
33
|
+
# (singular or plural).
|
|
34
|
+
DATE_PATTERN = /\A\d{4}-\d{2}-\d{2}\z/
|
|
35
|
+
INTERVAL_PATTERN = /\A\d+\s+(SECOND|MINUTE|HOUR|DAY|WEEK|MONTH|QUARTER|YEAR)S?\z/i
|
|
36
|
+
|
|
37
|
+
def initialize(
|
|
38
|
+
url:, database:, username: nil, password: nil, logger: nil,
|
|
39
|
+
coverage_window: DEFAULT_COVERAGE_WINDOW, risk_window: DEFAULT_RISK_WINDOW)
|
|
40
|
+
@url = url
|
|
41
|
+
@database = database
|
|
42
|
+
@username = username
|
|
43
|
+
@password = password
|
|
44
|
+
@logger = logger || ::Logger.new($stdout, level: 1)
|
|
45
|
+
@coverage_window = coverage_window
|
|
46
|
+
@risk_window = risk_window
|
|
47
|
+
end
|
|
48
|
+
|
|
49
|
+
# @param snapshot_date [Date, String] date stamp for this run; defaults to today.
|
|
50
|
+
# @return [void]
|
|
51
|
+
def run(snapshot_date: Date.today) # rubocop:disable Metrics/AbcSize
|
|
52
|
+
sql = build_sql(snapshot_date: snapshot_date)
|
|
53
|
+
logger.info(
|
|
54
|
+
"#{LOG_PREFIX} Running test_health_risk aggregation snapshot_date=#{snapshot_date} " \
|
|
55
|
+
"coverage_window=#{coverage_window} risk_window=#{risk_window}"
|
|
56
|
+
)
|
|
57
|
+
client.query(sql, format: "TabSeparated")
|
|
58
|
+
inserted = fetch_row_count(snapshot_date)
|
|
59
|
+
if inserted.is_a?(Integer) && inserted.zero?
|
|
60
|
+
logger.warn(
|
|
61
|
+
"#{LOG_PREFIX} Aggregation wrote 0 rows for snapshot_date=#{snapshot_date}. " \
|
|
62
|
+
"This is valid if no per-test data is in the coverage_window, but worth checking " \
|
|
63
|
+
"test_coverage_per_file directly if a recent export ran."
|
|
64
|
+
)
|
|
65
|
+
else
|
|
66
|
+
logger.info("#{LOG_PREFIX} Aggregation wrote #{inserted} rows for snapshot_date=#{snapshot_date}")
|
|
67
|
+
end
|
|
68
|
+
rescue StandardError => e
|
|
69
|
+
logger.error("#{LOG_PREFIX} Aggregation failed for #{snapshot_date}: #{e.message}")
|
|
70
|
+
raise
|
|
71
|
+
end
|
|
72
|
+
|
|
73
|
+
private
|
|
74
|
+
|
|
75
|
+
attr_reader :url, :database, :username, :password, :logger, :coverage_window, :risk_window
|
|
76
|
+
|
|
77
|
+
def build_sql(snapshot_date:)
|
|
78
|
+
template = File.read(SQL_FILE)
|
|
79
|
+
template
|
|
80
|
+
.gsub('{snapshot_date}', validate_date(snapshot_date))
|
|
81
|
+
.gsub('{coverage_window}', validate_interval(coverage_window))
|
|
82
|
+
.gsub('{risk_window}', validate_interval(risk_window))
|
|
83
|
+
end
|
|
84
|
+
|
|
85
|
+
# DateTime/Time `to_s` includes the time portion and is rejected.
|
|
86
|
+
def validate_date(value)
|
|
87
|
+
str = value.to_s
|
|
88
|
+
raise ArgumentError, "Invalid snapshot_date: #{value.inspect}" unless DATE_PATTERN.match?(str)
|
|
89
|
+
|
|
90
|
+
str
|
|
91
|
+
end
|
|
92
|
+
|
|
93
|
+
def validate_interval(value)
|
|
94
|
+
raise ArgumentError, "Invalid interval expression: #{value.inspect}" unless INTERVAL_PATTERN.match?(value.to_s)
|
|
95
|
+
|
|
96
|
+
value
|
|
97
|
+
end
|
|
98
|
+
|
|
99
|
+
# Returns 'unknown' on any error so a transient count-query failure
|
|
100
|
+
# can't mask the success of the actual INSERT.
|
|
101
|
+
def fetch_row_count(snapshot_date)
|
|
102
|
+
count_sql = "SELECT count() FROM code_coverage.test_health_risk_per_group FINAL " \
|
|
103
|
+
"WHERE snapshot_date = toDate('#{validate_date(snapshot_date)}')"
|
|
104
|
+
result = client.query(count_sql, format: "JSONCompact")
|
|
105
|
+
result&.dig('data', 0, 0) || 'unknown'
|
|
106
|
+
rescue StandardError => e
|
|
107
|
+
logger.debug("#{LOG_PREFIX} Could not fetch post-aggregation row count: #{e.message}")
|
|
108
|
+
'unknown'
|
|
109
|
+
end
|
|
110
|
+
end
|
|
111
|
+
end
|
|
112
|
+
end
|
|
113
|
+
end
|
|
114
|
+
end
|
|
@@ -0,0 +1,174 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require 'json'
|
|
4
|
+
|
|
5
|
+
module GitlabQuality
|
|
6
|
+
module TestTooling
|
|
7
|
+
module CodeCoverage
|
|
8
|
+
# Reads per-test coverage files and produces rows for
|
|
9
|
+
# `PerTestCoverageTable`.
|
|
10
|
+
#
|
|
11
|
+
# Two input formats are supported, dispatched by file extension:
|
|
12
|
+
#
|
|
13
|
+
# `.json`: one document with the example id as the outer key.
|
|
14
|
+
# {
|
|
15
|
+
# "spec/path/to/test_spec.rb[1:1]": {
|
|
16
|
+
# "app/path/to/source.rb": [null, 1, 0, 5, 1, ...]
|
|
17
|
+
# },
|
|
18
|
+
# ...
|
|
19
|
+
# }
|
|
20
|
+
#
|
|
21
|
+
# `.ndjson`: one JSON object per line, with `id` and `files` fields.
|
|
22
|
+
# {"id":"spec/path/to/test_spec.rb[1:1]","files":{"app/path/to/source.rb":[null,1,0,5,1]}}
|
|
23
|
+
# {"id":"spec/path/to/test_spec.rb[1:2]","files":{"app/path/to/source.rb":[null,0,1,0,1]}}
|
|
24
|
+
#
|
|
25
|
+
# The NDJSON form lets the producing formatter stream per-example data
|
|
26
|
+
# to disk without holding the full suite in memory. Both forms carry
|
|
27
|
+
# the same per-test data; the parser is symmetric.
|
|
28
|
+
#
|
|
29
|
+
# Inner key (in either form) is a source file path. Inner value is a
|
|
30
|
+
# 0-indexed array of per-line hit counts. `null` means non-executable;
|
|
31
|
+
# `0` means executable but not hit by this test; positive integer means
|
|
32
|
+
# executed. This is the standard Ruby `Coverage` module output shape,
|
|
33
|
+
# also produced by any per-test capture that emits one line-hit array
|
|
34
|
+
# per (test, file) pair.
|
|
35
|
+
#
|
|
36
|
+
# This class:
|
|
37
|
+
# - strips `[<example_uid>]` from the example id to get a per-test-file key
|
|
38
|
+
# - converts each line-hit array into a (covered_lines, total_lines) pair
|
|
39
|
+
# - pre-aggregates within (test_file, source_file): unions covered
|
|
40
|
+
# lines across all examples in the same test file, takes the max
|
|
41
|
+
# total_lines
|
|
42
|
+
# - drops rows with empty bitmaps (file imported but no line hit)
|
|
43
|
+
# - enriches with feature_category / group / stage / section when test
|
|
44
|
+
# metadata is provided
|
|
45
|
+
class PerTestCoverageData
  # Raised when a coverage artifact can't be parsed. Wraps the underlying
  # `JSON::ParserError` or `Errno::ENOENT` so callers outside the
  # gitlab-org/gitlab CI context (where upstream `needs:` ordering
  # guarantees well-formed artifacts) can rescue precisely without
  # catching unrelated standard exceptions.
  ParseError = Class.new(StandardError)

  # @param coverage_files [Array<String>] paths to per-test coverage JSON artifacts
  # @param tests_to_categories [Hash<String, Array<String>>] test_file => [feature_category]
  # @param feature_categories_to_teams [Hash<String, Hash>] category => {group:, stage:, section:}
  # @param captured_sha [String] the git SHA the coverage was captured against; attached to
  #   every emitted row so downstream delta-capture jobs can ask
  #   `SELECT max(captured_sha) FROM code_coverage.test_coverage_per_file` to find the
  #   previous successful capture point. Defaults to '' when unknown.
  # @raise [ParseError] if a coverage file is missing or contains invalid JSON
  def initialize(coverage_files, tests_to_categories: {}, feature_categories_to_teams: {}, captured_sha: '')
    @coverage_files = Array(coverage_files)
    @tests_to_categories = tests_to_categories
    @feature_categories_to_teams = feature_categories_to_teams
    @captured_sha = captured_sha.to_s
  end

  # @return [Array<Hash<Symbol, Object>>] per-test-file, per-source-file rows for PerTestCoverageTable
  def as_db_table # rubocop:disable Metrics/AbcSize
    aggregated = {}

    @coverage_files.each do |path|
      each_example(path) do |example_id, files|
        test_file = extract_test_file_path(example_id)
        files.each do |source_file, line_hits|
          covered, total = parse_line_hits(line_hits)
          next if covered.empty?

          key = [test_file, source_file]
          if aggregated.key?(key)
            aggregated[key][:covered_lines].merge(covered)
            # max rather than picking either side: examples within the
            # same test file may report arrays of different lengths if
            # the source file was edited mid-run. Pragmatic, not exact.
            aggregated[key][:total_lines] = [aggregated[key][:total_lines], total].max
          else
            # dup so the merge above can never alias a Set returned by
            # parse_line_hits to a different key later in the loop.
            aggregated[key] = { covered_lines: covered.dup, total_lines: total }
          end
        end
      end
    end

    aggregated.map do |(test_file, source_file), agg|
      category = @tests_to_categories[test_file]&.first || ''
      team = @feature_categories_to_teams[category] || {}

      {
        test_file: test_file,
        source_file: source_file,
        covered_lines: agg[:covered_lines].to_a.sort,
        total_lines: agg[:total_lines],
        feature_category: category,
        group: team[:group] || '',
        stage: team[:stage] || '',
        section: team[:section] || '',
        captured_sha: @captured_sha
      }
    end
  end

  private

  # Yield (example_id, files) pairs from one input file. Dispatches
  # on extension: `.ndjson` is parsed line-by-line so a multi-GB
  # capture file does not need to fit in memory; everything else is
  # parsed as a single JSON document with `{example_id => files}` at
  # the top level. Both forms wrap parse failures in `ParseError` so
  # callers can rescue without naming the underlying exception classes.
  def each_example(path)
    if path.end_with?('.ndjson')
      File.foreach(path) do |line|
        line = line.strip
        next if line.empty?

        entry = JSON.parse(line)
        yield entry.fetch('id'), entry.fetch('files')
      end
    else
      JSON.parse(File.read(path)).each do |example_id, files|
        yield example_id, files
      end
    end
  rescue JSON::ParserError, Errno::ENOENT => e
    raise ParseError, "Failed to parse coverage artifact #{path}: #{e.message}"
  end

  # Normalise an example id (`<test_file>[<uid>]:<line>`) to the
  # bare test_file path. Strips a leading `./`, the trailing
  # `[<uid>]`, and any `:<line>` suffix so multiple examples within
  # the same spec file collapse to the same test_file key. Path
  # separator handling assumes Linux paths (everything in CI is
  # Linux); a Windows-style `C:/foo/spec.rb` would split incorrectly
  # on the first `:`.
  def extract_test_file_path(example_id)
    stripped = example_id.delete_prefix('./')
    # Drop everything from the first `[` onward rather than requiring the
    # closing bracket at end-of-string: uids contain colons
    # (e.g. `spec/a_spec.rb[1:2]:42`), so an end-anchored `/\[.+\]\z/`
    # would not match when a `:<line>` suffix follows and the later
    # `split(':')` would then truncate inside the uid (`spec/a_spec.rb[1`).
    stripped = stripped.sub(/\[.*\z/, '')
    stripped.split(':').first
  end

  # Walk the per-line hit array. Returns (Set<Integer> covered_lines,
  # Integer total_executable_lines). Indexes are 0-based; line numbers
  # are 1-based. `nil` entries are non-executable lines.
  # A `nil` value at the file level (file loaded but never recorded
  # under this example) yields an empty result rather than raising,
  # so a single odd cell doesn't fail the whole export.
  def parse_line_hits(line_hits)
    return [Set.new, 0] unless line_hits.is_a?(Array)

    covered = Set.new
    total = 0
    line_hits.each_with_index do |hits, index|
      next if hits.nil?

      total += 1
      covered.add(index + 1) if hits.is_a?(Numeric) && hits.positive?
    end
    [covered, total]
  end
end
|
|
172
|
+
end
|
|
173
|
+
end
|
|
174
|
+
end
|
metadata
CHANGED
|
@@ -1,14 +1,14 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: gitlab_quality-test_tooling
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 3.14.0
|
|
4
|
+
version: 3.15.0
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- GitLab Quality
|
|
8
8
|
autorequire:
|
|
9
9
|
bindir: exe
|
|
10
10
|
cert_chain: []
|
|
11
|
-
date: 2026-05-
|
|
11
|
+
date: 2026-05-13 00:00:00.000000000 Z
|
|
12
12
|
dependencies:
|
|
13
13
|
- !ruby/object:Gem::Dependency
|
|
14
14
|
name: climate_control
|
|
@@ -491,11 +491,16 @@ files:
|
|
|
491
491
|
- lib/gitlab_quality/test_tooling/code_coverage/artifacts.rb
|
|
492
492
|
- lib/gitlab_quality/test_tooling/code_coverage/category_owners.rb
|
|
493
493
|
- lib/gitlab_quality/test_tooling/code_coverage/click_house/category_owners_table.rb
|
|
494
|
+
- lib/gitlab_quality/test_tooling/code_coverage/click_house/client.rb
|
|
494
495
|
- lib/gitlab_quality/test_tooling/code_coverage/click_house/coverage_metrics_table.rb
|
|
496
|
+
- lib/gitlab_quality/test_tooling/code_coverage/click_house/per_test_coverage_table.rb
|
|
495
497
|
- lib/gitlab_quality/test_tooling/code_coverage/click_house/table.rb
|
|
496
498
|
- lib/gitlab_quality/test_tooling/code_coverage/click_house/test_file_mappings_table.rb
|
|
499
|
+
- lib/gitlab_quality/test_tooling/code_coverage/click_house/test_health_risk_aggregation.sql
|
|
500
|
+
- lib/gitlab_quality/test_tooling/code_coverage/click_house/test_health_risk_aggregator.rb
|
|
497
501
|
- lib/gitlab_quality/test_tooling/code_coverage/coverage_data.rb
|
|
498
502
|
- lib/gitlab_quality/test_tooling/code_coverage/lcov_file.rb
|
|
503
|
+
- lib/gitlab_quality/test_tooling/code_coverage/per_test_coverage_data.rb
|
|
499
504
|
- lib/gitlab_quality/test_tooling/code_coverage/responsibility_classifier.rb
|
|
500
505
|
- lib/gitlab_quality/test_tooling/code_coverage/responsibility_patterns_config.rb
|
|
501
506
|
- lib/gitlab_quality/test_tooling/code_coverage/rspec_report.rb
|