polyrun 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (105)
  1. checksums.yaml +7 -0
  2. data/CODE_OF_CONDUCT.md +31 -0
  3. data/CONTRIBUTING.md +84 -0
  4. data/LICENSE +21 -0
  5. data/README.md +140 -0
  6. data/SECURITY.md +27 -0
  7. data/bin/polyrun +6 -0
  8. data/docs/SETUP_PROFILE.md +106 -0
  9. data/lib/polyrun/cli/coverage_commands.rb +150 -0
  10. data/lib/polyrun/cli/coverage_merge_io.rb +124 -0
  11. data/lib/polyrun/cli/database_commands.rb +149 -0
  12. data/lib/polyrun/cli/env_commands.rb +43 -0
  13. data/lib/polyrun/cli/helpers.rb +113 -0
  14. data/lib/polyrun/cli/init_command.rb +99 -0
  15. data/lib/polyrun/cli/plan_command.rb +134 -0
  16. data/lib/polyrun/cli/prepare_command.rb +71 -0
  17. data/lib/polyrun/cli/prepare_recipe.rb +77 -0
  18. data/lib/polyrun/cli/queue_command.rb +101 -0
  19. data/lib/polyrun/cli/quick_command.rb +13 -0
  20. data/lib/polyrun/cli/report_commands.rb +94 -0
  21. data/lib/polyrun/cli/run_shards_command.rb +88 -0
  22. data/lib/polyrun/cli/run_shards_plan_boot_phases.rb +91 -0
  23. data/lib/polyrun/cli/run_shards_plan_options.rb +45 -0
  24. data/lib/polyrun/cli/run_shards_planning.rb +124 -0
  25. data/lib/polyrun/cli/run_shards_run.rb +168 -0
  26. data/lib/polyrun/cli/start_bootstrap.rb +99 -0
  27. data/lib/polyrun/cli/timing_command.rb +31 -0
  28. data/lib/polyrun/cli.rb +184 -0
  29. data/lib/polyrun/config.rb +61 -0
  30. data/lib/polyrun/coverage/cobertura_zero_lines.rb +32 -0
  31. data/lib/polyrun/coverage/collector.rb +184 -0
  32. data/lib/polyrun/coverage/collector_finish.rb +95 -0
  33. data/lib/polyrun/coverage/filter.rb +22 -0
  34. data/lib/polyrun/coverage/formatter.rb +115 -0
  35. data/lib/polyrun/coverage/merge/formatters.rb +181 -0
  36. data/lib/polyrun/coverage/merge/formatters_html.rb +55 -0
  37. data/lib/polyrun/coverage/merge.rb +127 -0
  38. data/lib/polyrun/coverage/merge_fragment_meta.rb +47 -0
  39. data/lib/polyrun/coverage/merge_merge_two.rb +117 -0
  40. data/lib/polyrun/coverage/rails.rb +128 -0
  41. data/lib/polyrun/coverage/reporting.rb +41 -0
  42. data/lib/polyrun/coverage/result.rb +18 -0
  43. data/lib/polyrun/coverage/track_files.rb +141 -0
  44. data/lib/polyrun/data/cached_fixtures.rb +122 -0
  45. data/lib/polyrun/data/factory_counts.rb +35 -0
  46. data/lib/polyrun/data/factory_instrumentation.rb +50 -0
  47. data/lib/polyrun/data/fixtures.rb +68 -0
  48. data/lib/polyrun/data/parallel_provisioning.rb +93 -0
  49. data/lib/polyrun/data/snapshot.rb +84 -0
  50. data/lib/polyrun/database/clone_shards.rb +81 -0
  51. data/lib/polyrun/database/provision.rb +72 -0
  52. data/lib/polyrun/database/shard.rb +63 -0
  53. data/lib/polyrun/database/url_builder/connection/infer.rb +49 -0
  54. data/lib/polyrun/database/url_builder/connection/url_builders.rb +43 -0
  55. data/lib/polyrun/database/url_builder/connection.rb +191 -0
  56. data/lib/polyrun/database/url_builder/template_prepare.rb +21 -0
  57. data/lib/polyrun/database/url_builder.rb +160 -0
  58. data/lib/polyrun/debug.rb +81 -0
  59. data/lib/polyrun/env/ci.rb +65 -0
  60. data/lib/polyrun/log.rb +70 -0
  61. data/lib/polyrun/minitest.rb +17 -0
  62. data/lib/polyrun/partition/constraints.rb +69 -0
  63. data/lib/polyrun/partition/hrw.rb +33 -0
  64. data/lib/polyrun/partition/min_heap.rb +64 -0
  65. data/lib/polyrun/partition/paths.rb +28 -0
  66. data/lib/polyrun/partition/paths_build.rb +128 -0
  67. data/lib/polyrun/partition/plan.rb +189 -0
  68. data/lib/polyrun/partition/plan_lpt.rb +49 -0
  69. data/lib/polyrun/partition/plan_sharding.rb +48 -0
  70. data/lib/polyrun/partition/stable_shuffle.rb +18 -0
  71. data/lib/polyrun/prepare/artifacts.rb +40 -0
  72. data/lib/polyrun/prepare/assets.rb +57 -0
  73. data/lib/polyrun/queue/file_store.rb +199 -0
  74. data/lib/polyrun/queue/file_store_pending.rb +48 -0
  75. data/lib/polyrun/quick/assertions.rb +32 -0
  76. data/lib/polyrun/quick/errors.rb +6 -0
  77. data/lib/polyrun/quick/example_group.rb +66 -0
  78. data/lib/polyrun/quick/example_runner.rb +93 -0
  79. data/lib/polyrun/quick/matchers.rb +156 -0
  80. data/lib/polyrun/quick/reporter.rb +42 -0
  81. data/lib/polyrun/quick/runner.rb +180 -0
  82. data/lib/polyrun/quick.rb +1 -0
  83. data/lib/polyrun/railtie.rb +7 -0
  84. data/lib/polyrun/reporting/junit.rb +125 -0
  85. data/lib/polyrun/reporting/junit_emit.rb +58 -0
  86. data/lib/polyrun/reporting/rspec_junit.rb +39 -0
  87. data/lib/polyrun/rspec.rb +15 -0
  88. data/lib/polyrun/templates/POLYRUN.md +45 -0
  89. data/lib/polyrun/templates/ci_matrix.polyrun.yml +14 -0
  90. data/lib/polyrun/templates/minimal_gem.polyrun.yml +13 -0
  91. data/lib/polyrun/templates/rails_prepare.polyrun.yml +31 -0
  92. data/lib/polyrun/timing/merge.rb +35 -0
  93. data/lib/polyrun/timing/summary.rb +25 -0
  94. data/lib/polyrun/version.rb +3 -0
  95. data/lib/polyrun.rb +58 -0
  96. data/polyrun.gemspec +37 -0
  97. data/sig/polyrun/cli.rbs +6 -0
  98. data/sig/polyrun/config.rbs +20 -0
  99. data/sig/polyrun/debug.rbs +12 -0
  100. data/sig/polyrun/log.rbs +12 -0
  101. data/sig/polyrun/minitest.rbs +5 -0
  102. data/sig/polyrun/quick.rbs +19 -0
  103. data/sig/polyrun/rspec.rbs +5 -0
  104. data/sig/polyrun.rbs +11 -0
  105. metadata +288 -0
@@ -0,0 +1,127 @@
1
+ require "json"
2
+
3
module Polyrun
  module Coverage
    # Merges SimpleCov-compatible coverage blobs (line arrays and optional branches).
    # Intended to be replaced or accelerated by a native extension for large suites.
    #
    # Complexity: +merge_two+ is linear in the number of file keys in its operands. Shards are
    # combined with +merge_blob_tree+ (pairwise rounds), so total work stays linear in the sum of
    # blob sizes across shards (same asymptotic cost as a left fold; shallower call depth). Group
    # recomputation after merge is O(files x groups) with one pass over files
    # (+TrackFiles.group_summaries+).
    module Merge
      module_function

      # Merged coverage blob only (same as +merge_fragments(paths)[:blob]+).
      def merge_files(paths)
        merge_fragments(paths).fetch(:blob)
      end

      # Returns +{ blob:, meta:, groups: }+. +groups+ is recomputed from the merged blob when the
      # fragments carry +meta.polyrun_coverage_root+ and +meta.polyrun_coverage_groups+ (emitted by
      # {Collector}). When +meta.polyrun_track_files+ is present (sharded runs defer per-shard
      # untracked expansion), +TrackFiles.merge_untracked_into_blob+ runs once on the merged blob
      # so totals match a serial run.
      def merge_fragments(paths)
        return {blob: {}, meta: {}, groups: nil} if paths.empty?

        documents = paths.map { |path| JSON.parse(File.read(path)) }
        fragment_blobs = documents.map { |doc| extract_coverage_blob(doc) }
        blob = merge_blob_tree(fragment_blobs)
        meta = merge_fragment_metas(documents)
        blob = apply_track_files_once_after_merge(blob, meta)
        {blob: blob, meta: meta, groups: recompute_groups_from_meta(blob, meta)}
      end

      # Expands still-untracked files into the merged blob exactly once; no-op unless both
      # +polyrun_track_files+ and +polyrun_coverage_root+ survived the meta merge.
      def apply_track_files_once_after_merge(blob, merged_meta)
        return blob unless merged_meta.is_a?(Hash)

        track_files = merged_meta["polyrun_track_files"]
        coverage_root = merged_meta["polyrun_coverage_root"]
        return blob if track_files.nil? || coverage_root.nil?

        require_relative "track_files"
        TrackFiles.merge_untracked_into_blob(blob, coverage_root, track_files)
      end

      # Balanced pairwise reduction: same total +merge_two+ work as a left fold, shallower stack.
      def merge_blob_tree(blobs)
        return {} if blobs.empty?

        rounds = blobs
        while rounds.size > 1
          rounds = rounds.each_slice(2).map do |pair|
            pair.length == 2 ? merge_two(pair.first, pair.last) : pair.first
          end
        end
        rounds.first
      end

      INTERNAL_META_KEYS = %w[polyrun_coverage_root polyrun_coverage_groups polyrun_track_files].freeze

      # Canonical form for comparing +polyrun_track_files+ values across fragments.
      def normalize_track_files_meta(tf)
        return tf.map(&:to_s).sort if tf.is_a?(Array)

        [tf.to_s]
      end

      # Group percentages recomputed from the merged blob; nil when meta lacks root or groups.
      def recompute_groups_from_meta(blob, merged_meta)
        return nil unless merged_meta.is_a?(Hash)

        coverage_root = merged_meta["polyrun_coverage_root"]
        groups_cfg = merged_meta["polyrun_coverage_groups"]
        return nil if coverage_root.nil? || groups_cfg.nil? || groups_cfg.empty?

        require_relative "track_files"
        TrackFiles.group_summaries(blob, coverage_root, groups_cfg)
      end

      # Coverage blob extracted from a single fragment file on disk.
      def parse_file(path)
        extract_coverage_blob(JSON.parse(File.read(path)))
      end

      # Picks the top-level +coverage+ export, merges all suite entries (e.g. RSpec + Minitest),
      # and combines both when present.
      def extract_coverage_blob(data)
        return {} unless data.is_a?(Hash)

        top_level = data["coverage"]
        suite_blobs = []
        data.each do |key, value|
          next if key == "coverage"
          next unless value.is_a?(Hash) && value["coverage"].is_a?(Hash)

          suite_blobs << value["coverage"]
        end

        if suite_blobs.empty?
          return top_level.is_a?(Hash) ? top_level : {}
        end

        combined = suite_blobs.reduce { |acc, blob| merge_two(acc, blob) }
        top_level.is_a?(Hash) ? merge_two(top_level, combined) : combined
      end
    end
  end
end
124
+
125
+ require_relative "merge_merge_two"
126
+ require_relative "merge_fragment_meta"
127
+ require_relative "merge/formatters"
@@ -0,0 +1,47 @@
1
module Polyrun
  module Coverage
    module Merge
      module_function

      # Combines per-fragment +meta+ hashes into a single hash.
      #
      # The first fragment's meta is the base; the polyrun-internal keys
      # (+polyrun_coverage_root+, +polyrun_coverage_groups+, +polyrun_track_files+) come from the
      # first fragment that defines them. When fragments disagree on one of those keys a warning
      # is logged and the first value wins.
      #
      # @param docs [Array<Hash>] parsed fragment documents
      # @return [Hash] merged meta; empty hash when +docs+ is empty
      def merge_fragment_metas(docs)
        metas = docs.map { |d| extract_doc_meta(d) }
        # Guard against empty input: callers usually pre-check, but don't crash if they don't.
        base = (metas.first || {}).dup
        roots = metas.map { |m| m["polyrun_coverage_root"] }.compact
        grs = metas.map { |m| m["polyrun_coverage_groups"] }.compact
        tfs = metas.map { |m| m["polyrun_track_files"] }.compact
        merge_fragment_meta_warn_root!(roots)
        merge_fragment_meta_warn_groups!(grs)
        merge_fragment_meta_warn_track_files!(tfs)
        root = roots.first
        groups_cfg = grs.first
        track_files_cfg = tfs.first
        base["polyrun_coverage_root"] = root if root
        base["polyrun_coverage_groups"] = groups_cfg if groups_cfg
        base["polyrun_track_files"] = track_files_cfg if track_files_cfg
        base
      end

      # Meta section of a fragment document with string keys; {} when absent or malformed.
      def extract_doc_meta(d)
        (d.is_a?(Hash) && d["meta"].is_a?(Hash)) ? d["meta"].transform_keys(&:to_s) : {}
      end

      # Warns when fragments carry different coverage roots.
      def merge_fragment_meta_warn_root!(roots)
        return if roots.uniq.size <= 1

        Polyrun::Log.warn "Polyrun merge-coverage: polyrun_coverage_root differs across fragments; using first."
      end

      # Warns when fragments carry different group configurations.
      def merge_fragment_meta_warn_groups!(grs)
        return if grs.uniq.size <= 1

        Polyrun::Log.warn "Polyrun merge-coverage: polyrun_coverage_groups differs across fragments; using first."
      end

      # Warns when fragments carry different track_files settings.
      # Compares normalized arrays directly instead of JSON.generate output: equivalent
      # (normalized values are sorted string arrays) and removes this file's silent dependency
      # on +json+ having been required elsewhere.
      def merge_fragment_meta_warn_track_files!(tfs)
        return if tfs.map { |tf| normalize_track_files_meta(tf) }.uniq.size <= 1

        Polyrun::Log.warn "Polyrun merge-coverage: polyrun_track_files differs across fragments; using first."
      end
    end
  end
end
@@ -0,0 +1,117 @@
1
module Polyrun
  module Coverage
    module Merge
      module_function

      # Union of both blobs' file keys; each file entry merged pairwise.
      def merge_two(a, b)
        (a.keys | b.keys).each_with_object({}) do |path, merged|
          merged[path] = merge_file_entry(a[path], b[path])
        end
      end

      # Canonical entry shape: bare line arrays become {"lines" => [...]}; nil stays nil.
      def normalize_file_entry(v)
        return nil if v.nil?

        v.is_a?(Array) ? {"lines" => v} : v
      end

      # Line array from a string- or symbol-keyed entry; nil when the entry is not a hash.
      def line_array_from_file_entry(file)
        entry = normalize_file_entry(file)
        entry.is_a?(Hash) ? (entry["lines"] || entry[:lines]) : nil
      end

      # Merges two per-file entries; the "branches" key appears only when either side had one.
      def merge_file_entry(x, y)
        left = normalize_file_entry(x)
        right = normalize_file_entry(y)
        return right if left.nil?
        return left if right.nil?

        merged = {
          "lines" => merge_line_arrays(left["lines"] || left[:lines], right["lines"] || right[:lines])
        }
        branches = merge_branch_arrays(left["branches"] || left[:branches], right["branches"] || right[:branches])
        merged["branches"] = branches if branches
        merged
      end

      # Element-wise hit merge padded to the longer array.
      def merge_line_arrays(a, b)
        left = a || []
        right = b || []
        length = [left.size, right.size].max
        Array.new(length) { |idx| merge_line_hits(left[idx], right[idx]) }
      end

      # nil = irrelevant line; "ignored" is sticky; numeric hits add up.
      def merge_line_hits(x, y)
        return y if x.nil?
        return x if y.nil?
        return "ignored" if x == "ignored" || y == "ignored"

        xi = line_hit_to_i(x)
        yi = line_hit_to_i(y)
        if xi && yi
          xi + yi
        elsif xi.nil?
          yi || x
        else
          yi.nil? ? xi : x
        end
      end

      # Integers pass through; numeric strings coerce; anything else becomes nil.
      def line_hit_to_i(v)
        return v if v.is_a?(Integer)
        return nil if v.nil?

        Integer(v, exception: false)
      end

      # Merges branch arrays keyed by (type, start_line, end_line), summing coverage.
      def merge_branch_arrays(a, b)
        return nil if a.nil? && b.nil?
        return (a || b).dup if a.nil? || b.nil?

        by_key = {}
        (a + b).each do |branch|
          key = branch_key(branch)
          by_key[key] = by_key.key?(key) ? merge_branch_entries(by_key[key], branch) : branch.dup
        end
        by_key.values.sort_by { |branch| branch_key(branch) }
      end

      # Stable identity for a branch record (string or symbol keys).
      def branch_key(br)
        h = br.is_a?(Hash) ? br : {}
        [h["type"] || h[:type], h["start_line"] || h[:start_line], h["end_line"] || h[:end_line]]
      end

      # Same branch from two shards: keep the first record's fields, sum the coverage counts.
      def merge_branch_entries(x, y)
        merged = x.is_a?(Hash) ? x.dup : {}
        merged["coverage"] = (x["coverage"] || x[:coverage]).to_i + (y["coverage"] || y[:coverage]).to_i
        merged
      end
    end
  end
end
@@ -0,0 +1,128 @@
1
+ require "yaml"
2
+
3
+ require_relative "collector"
4
+ require_relative "formatter"
5
+
6
module Polyrun
  module Coverage
    # Rails integration entry point for {Collector.start!}: reads the optional
    # +config/polyrun_coverage.yml+ under the project root, infers the root from
    # +spec_helper.rb+ / +rails_helper.rb+ / +test_helper.rb+, and turns +report_formats+ into a
    # {Formatter.multi} formatter. Call at the **top** of +spec/spec_helper.rb+ (before
    # +require "rails"+) so stdlib +Coverage+ sees application code.
    #
    #   require "polyrun/coverage/rails"
    #   Polyrun::Coverage::Rails.start!
    #
    module Rails
      module_function

      DEFAULT_CONFIG_RELATIVE = File.join("config", "polyrun_coverage.yml").freeze

    # @param root [String, nil] project root (default: infer from caller, or +Rails.root+ when loaded)
    # @param config_path [String, nil] YAML path (default: +<root>/config/polyrun_coverage.yml+ if present)
    # @param overrides [Hash] merged over YAML; keys match {Collector.start!} (+report_formats+ builds +formatter+)
      def start!(root: nil, config_path: nil, **overrides)
        return if Collector.disabled?

        project_root = File.expand_path(resolve_root(root))
        settings = deep_merge_hashes(load_config(project_root, config_path), stringify_keys(overrides))
        settings["root"] = project_root

        formats = settings.delete("report_formats")
        if !settings.key?("formatter") && !formats.nil?
          settings["formatter"] = build_formatter(Array(formats), project_root, settings)
        end

        Collector.start!(**collector_kwargs(settings))
      end

      # Exposed for tests and custom loaders: project root implied by a helper file path, else nil.
      def infer_root_from_path(path)
        helper = File.basename(path.to_s)
        return unless %w[spec_helper.rb rails_helper.rb test_helper.rb].include?(helper)

        File.expand_path("..", File.dirname(path))
      end

      # Root resolution order: explicit argument, then Rails.root, then the first helper file
      # found on the call stack. Raises when none applies.
      def resolve_root(explicit)
        return File.expand_path(explicit) if explicit

        if defined?(::Rails) && ::Rails.respond_to?(:root) && ::Rails.root
          return ::Rails.root.to_s
        end

        caller_locations.each do |frame|
          root_guess = infer_root_from_path(frame.path)
          return root_guess if root_guess
        end

        raise ArgumentError,
          "Polyrun::Coverage::Rails.start! could not infer project root; pass root: (e.g. Rails.root or File.expand_path('..', __dir__))"
      end

      # Loads the YAML config; {} when the file is absent or not a hash.
      def load_config(root, config_path)
        path = File.expand_path(config_path || File.join(root, DEFAULT_CONFIG_RELATIVE))
        return {} unless File.file?(path)

        # NOTE(review): YAML.load_file deserializes arbitrary tags on older Rubies. The file is
        # project-local config, but consider YAML.safe_load_file — confirm no configs rely on tags.
        loaded = YAML.load_file(path)
        loaded.is_a?(Hash) ? stringify_keys(loaded) : {}
      end

      # Builds a multi-formatter from configured format names; nil when no formats requested.
      def build_formatter(formats, root, merged)
        return nil if formats.empty?

        output_dir = File.expand_path((merged["report_output_dir"] || File.join(root, "coverage")).to_s, root)
        basename = (merged["report_basename"] || "polyrun-coverage").to_s
        Formatter.multi(*formats.map { |f| f.to_s.to_sym }, output_dir: output_dir, basename: basename)
      end

      # Shapes the merged settings hash into {Collector.start!} keyword arguments.
      def collector_kwargs(cfg)
        root = File.expand_path(cfg.fetch("root"))
        report_dir = cfg["report_output_dir"]
        {
          root: root,
          reject_patterns: Array(cfg["reject_patterns"] || []),
          track_under: cfg.key?("track_under") ? Array(cfg["track_under"]) : ["lib"],
          track_files: cfg["track_files"],
          groups: cfg["groups"],
          output_path: cfg["output_path"],
          minimum_line_percent: cfg["minimum_line_percent"],
          strict: cfg["strict"],
          meta: cfg["meta"].is_a?(Hash) ? cfg["meta"] : {},
          formatter: cfg["formatter"],
          report_output_dir: report_dir ? File.expand_path(report_dir.to_s, root) : nil,
          report_basename: (cfg["report_basename"] || "polyrun-coverage").to_s
        }
      end

      # Recursively converts hash keys to strings (arrays are walked, scalars pass through).
      def stringify_keys(obj)
        case obj
        when Hash
          obj.to_h { |k, v| [k.to_s, stringify_keys(v)] }
        when Array
          obj.map { |element| stringify_keys(element) }
        else
          obj
        end
      end

      # Deep merge with string keys; +b+ wins except where both sides hold hashes.
      def deep_merge_hashes(a, b)
        merged = a.is_a?(Hash) ? a.dup : {}
        b.each do |raw_key, value|
          key = raw_key.to_s
          merged[key] =
            if merged[key].is_a?(Hash) && value.is_a?(Hash)
              deep_merge_hashes(merged[key], stringify_keys(value))
            else
              stringify_keys(value)
            end
        end
        merged
      end
    end
  end
end
@@ -0,0 +1,41 @@
1
+ require "json"
2
+
3
+ require_relative "formatter"
4
+ require_relative "merge"
5
+ require_relative "result"
6
+
7
module Polyrun
  module Coverage
    # Ready-to-use multi-format output (SimpleCov-compatible result blob), no extra gems.
    # Pass +formatter:+ to compose formatters yourself ({Formatter::MultiFormatter}, custom classes).
    module Reporting
      DEFAULT_FORMATS = %w[json lcov cobertura console html].freeze

      # Comma list for +merge-coverage+ / +run-shards --merge-coverage+ defaults
      # (Codecov, Jenkins, HTML, etc.).
      DEFAULT_MERGE_FORMAT_LIST = DEFAULT_FORMATS.join(",").freeze

      # Writes the selected formats under +output_dir+, using +basename+ as the file prefix
      # (e.g. polyrun-coverage.json). When +formatter+ is nil, a {Formatter.multi} is built from
      # +formats+ (symbols or strings).
      def self.write(coverage_blob, output_dir:, basename: "polyrun-coverage", formats: DEFAULT_FORMATS, meta: {}, groups: nil, formatter: nil)
        active = formatter
        active ||= Formatter.multi(*Array(formats).map(&:to_sym), output_dir: output_dir, basename: basename)
        payload = Result.new(coverage_blob, meta: meta, groups: groups)
        active.format(payload, output_dir: output_dir, basename: basename)
      end

      # Loads a merged or raw JSON coverage file from disk and writes all requested formats.
      # +meta:+ / +groups:+ in kwargs override the values embedded in the file.
      def self.write_from_json_file(json_path, **kwargs)
        document = JSON.parse(File.read(json_path))
        blob = Merge.extract_coverage_blob(document)
        meta = kwargs.delete(:meta) || document["meta"] || {}
        groups = kwargs.key?(:groups) ? kwargs.delete(:groups) : document["groups"]
        write(blob, meta: meta, groups: groups, **kwargs)
      end
    end
  end
end
@@ -0,0 +1,18 @@
1
module Polyrun
  module Coverage
    # Payload handed to formatters (SimpleCov-compatible): merged line coverage plus the JSON
    # meta and groups sections.
    class Result
      attr_reader :coverage_blob, :meta, :groups

      # @param coverage_blob [Hash] file path => coverage entry
      # @param meta [Hash] metadata for the JSON payload; non-hash values collapse to {}
      # @param groups [Hash, nil] optional per-group summaries
      def initialize(coverage_blob, meta: {}, groups: nil)
        @coverage_blob = coverage_blob
        # Defensive: formatters expect a hash here, never nil or a scalar.
        @meta = meta.is_a?(Hash) ? meta : {}
        @groups = groups
      end

      # Paths covered by this result, in blob order.
      def files
        @coverage_blob.keys
      end
    end
  end
end
@@ -0,0 +1,141 @@
1
+ require "pathname"
2
+
3
+ require_relative "merge"
4
+
5
module Polyrun
  module Coverage
    # SimpleCov-compatible +track_files+ (globs from project root) and +add_group+ statistics
    # for the JSON payload (+groups+ with +lines.covered_percent+ per group).
    module TrackFiles
      module_function

      # Expands one or more glob patterns relative to +root+ (supports +{a,b}/**/*.rb+ with
      # File::FNM_EXTGLOB).
      #
      # Uses Dir.glob's +base:+ keyword instead of the previous Dir.chdir: chdir mutates the
      # process-wide working directory (and warns when called from a spawned thread), while
      # +base:+ returns the same root-relative matches without the global side effect.
      #
      # @return [Array<String>] unique absolute paths
      def expand_globs(root, track_files)
        root = File.expand_path(root)
        patterns = Array(track_files).map(&:to_s).reject(&:empty?)
        return [] if patterns.empty?

        patterns.flat_map do |pattern|
          Dir.glob(pattern, File::FNM_EXTGLOB, base: root)
        end.map { |rel| File.expand_path(rel, root) }.uniq
      end

      # Adds tracked files that were never required, with simulated line arrays
      # (blank/comment => nil, else 0). Matches SimpleCov +add_not_loaded_files+ behavior for
      # coverage completeness. Files already present in +blob+ keep their real data.
      def merge_untracked_into_blob(blob, root, track_files)
        root = File.expand_path(root)
        out = {}
        blob.each do |k, v|
          out[File.expand_path(k.to_s)] = v
        end

        expand_globs(root, track_files).each do |abs|
          next if out.key?(abs)
          next unless File.file?(abs)

          out[abs] = {"lines" => simulated_lines_for_unloaded(abs)}
        end
        out
      end

      # Line array for a never-loaded file: nil for blank/comment lines (irrelevant),
      # 0 for everything else (relevant, uncovered). Returns [] when the file vanished or is
      # unreadable between globbing and reading (best-effort, not an error).
      def simulated_lines_for_unloaded(path)
        lines = []
        File.foreach(path) do |line|
          lines << (blank_or_comment?(line) ? nil : 0)
        end
        lines
      rescue Errno::ENOENT, Errno::EACCES
        []
      end

      # True for lines that carry no executable code (empty or starting with '#').
      def blank_or_comment?(line)
        s = line.strip
        s.empty? || s.start_with?("#")
      end

      # +groups+ is a Hash of group_name => glob pattern (relative to +root+), SimpleCov
      # +add_group+ style. Produces the +groups+ section of SimpleCov JSON: each group has
      # lines.covered_percent. Assignment uses paths present in +blob+ matching each glob
      # (+File.fnmatch?+), not a fresh Dir.glob, so in-memory coverage lines up with reported
      # files. Files matching no group get "Ungrouped".
      def group_summaries(blob, root, groups)
        return {} if groups.nil? || groups.empty?

        root = File.expand_path(root)
        normalized = {}
        blob.each { |k, v| normalized[File.expand_path(k.to_s)] = v }

        accum, ungrouped, any_ungrouped = group_summaries_accumulate(normalized, root, groups)
        group_summaries_build_payload(groups, accum, ungrouped, any_ungrouped)
      end

      # One pass over files: per-group relevant/covered counters plus an "Ungrouped" bucket.
      # A file may count toward several groups. Relies on +Merge.line_counts+ (defined alongside
      # the merge helpers) to classify each entry's lines.
      def group_summaries_accumulate(normalized, root, groups)
        accum = Hash.new { |h, k| h[k] = {relevant: 0, covered: 0} }
        ungrouped = {relevant: 0, covered: 0}
        any_ungrouped_file = false

        normalized.each do |abs, entry|
          counts = Merge.line_counts(entry)
          matched = []
          groups.each do |name, glob_pattern|
            matched << name.to_s if file_matches_glob?(abs, glob_pattern, root)
          end
          if matched.empty?
            any_ungrouped_file = true
            ungrouped[:relevant] += counts[:relevant]
            ungrouped[:covered] += counts[:covered]
          else
            matched.each { |n| add_counts!(accum[n], counts) }
          end
        end
        [accum, ungrouped, any_ungrouped_file]
      end

      # Shapes the accumulated counters into the SimpleCov-style groups payload.
      # Every configured group appears even with zero matches (0.0 percent).
      def group_summaries_build_payload(groups, accum, ungrouped, any_ungrouped_file)
        out = {}
        groups.each_key do |name|
          n = name.to_s
          a = accum[n]
          out[n] = {
            "lines" => {
              "covered_percent" => percent_from_counts(a[:relevant], a[:covered])
            }
          }
        end

        if any_ungrouped_file
          out["Ungrouped"] = {
            "lines" => {
              "covered_percent" => percent_from_counts(ungrouped[:relevant], ungrouped[:covered])
            }
          }
        end

        out
      end

      # In-place counter accumulation for one group bucket.
      def add_counts!(acc, delta)
        acc[:relevant] += delta[:relevant]
        acc[:covered] += delta[:covered]
      end

      # Covered percentage, rounded; 0.0 when nothing is relevant (avoids division by zero).
      def percent_from_counts(relevant, covered)
        return round_percent(0.0) if relevant <= 0

        round_percent(100.0 * covered / relevant)
      end

      # Matches the root-relative path against the glob; falls back to the absolute path when the
      # file lies outside +root+ (relative_path_from raises ArgumentError in that case).
      def file_matches_glob?(absolute_path, pattern, root)
        rel = Pathname.new(absolute_path).relative_path_from(Pathname.new(root)).to_s
        File.fnmatch?(pattern, rel, File::FNM_PATHNAME | File::FNM_EXTGLOB)
      rescue ArgumentError
        File.fnmatch?(pattern, absolute_path, File::FNM_PATHNAME | File::FNM_EXTGLOB)
      end

      # Two-decimal rounding used for all reported percentages.
      def round_percent(x)
        x.to_f.round(2)
      end
    end
  end
end