polyrun 1.0.0 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +35 -0
- data/README.md +23 -3
- data/docs/SETUP_PROFILE.md +1 -1
- data/lib/polyrun/cli/ci_shard_run_command.rb +65 -0
- data/lib/polyrun/cli/config_command.rb +42 -0
- data/lib/polyrun/cli/default_run.rb +115 -0
- data/lib/polyrun/cli/help.rb +54 -0
- data/lib/polyrun/cli/helpers.rb +19 -30
- data/lib/polyrun/cli/plan_command.rb +47 -17
- data/lib/polyrun/cli/prepare_command.rb +2 -3
- data/lib/polyrun/cli/prepare_recipe.rb +12 -7
- data/lib/polyrun/cli/queue_command.rb +17 -7
- data/lib/polyrun/cli/run_shards_command.rb +49 -3
- data/lib/polyrun/cli/run_shards_plan_boot_phases.rb +3 -3
- data/lib/polyrun/cli/run_shards_plan_options.rb +18 -11
- data/lib/polyrun/cli/run_shards_planning.rb +16 -12
- data/lib/polyrun/cli/start_bootstrap.rb +2 -6
- data/lib/polyrun/cli.rb +53 -47
- data/lib/polyrun/config/dotted_path.rb +21 -0
- data/lib/polyrun/config/effective.rb +71 -0
- data/lib/polyrun/config/resolver.rb +70 -0
- data/lib/polyrun/config.rb +7 -0
- data/lib/polyrun/database/provision.rb +12 -7
- data/lib/polyrun/partition/constraints.rb +15 -4
- data/lib/polyrun/partition/paths.rb +83 -2
- data/lib/polyrun/partition/plan.rb +38 -28
- data/lib/polyrun/partition/timing_keys.rb +85 -0
- data/lib/polyrun/prepare/assets.rb +12 -5
- data/lib/polyrun/process_stdio.rb +91 -0
- data/lib/polyrun/quick/runner.rb +26 -17
- data/lib/polyrun/rspec.rb +19 -0
- data/lib/polyrun/templates/POLYRUN.md +1 -1
- data/lib/polyrun/templates/ci_matrix.polyrun.yml +4 -1
- data/lib/polyrun/timing/merge.rb +2 -1
- data/lib/polyrun/timing/rspec_example_formatter.rb +53 -0
- data/lib/polyrun/version.rb +1 -1
- data/polyrun.gemspec +1 -1
- data/sig/polyrun/rspec.rbs +2 -0
- metadata +12 -1
data/lib/polyrun/config.rb
CHANGED
|
@@ -5,6 +5,10 @@ module Polyrun
|
|
|
5
5
|
class Config
|
|
6
6
|
DEFAULT_FILENAMES = %w[polyrun.yml config/polyrun.yml].freeze
|
|
7
7
|
|
|
8
|
+
# Parallel worker defaults (+run-shards+, +POLYRUN_WORKERS+); single source with {Resolver} and {Effective}.
|
|
9
|
+
DEFAULT_PARALLEL_WORKERS = 5
|
|
10
|
+
MAX_PARALLEL_WORKERS = 10
|
|
11
|
+
|
|
8
12
|
attr_reader :path, :raw
|
|
9
13
|
|
|
10
14
|
def self.load(path: nil)
|
|
@@ -59,3 +63,6 @@ module Polyrun
|
|
|
59
63
|
end
|
|
60
64
|
end
|
|
61
65
|
end
|
|
66
|
+
|
|
67
|
+
require_relative "config/resolver"
|
|
68
|
+
require_relative "config/effective"
|
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
require "open3"
|
|
2
2
|
require "shellwords"
|
|
3
|
+
require_relative "../process_stdio"
|
|
3
4
|
|
|
4
5
|
module Polyrun
|
|
5
6
|
module Database
|
|
@@ -49,20 +50,24 @@ module Polyrun
|
|
|
49
50
|
# Multi-DB Rails apps must pass all template URLs in one invocation so each DB uses its own +migrations_paths+.
|
|
50
51
|
# Uses +db:prepare+ (not +db:migrate+ alone) so empty template databases load +schema.rb+ first;
|
|
51
52
|
# apps that squash or archive migrations and keep only incremental files need that path.
|
|
53
|
+
#
|
|
54
|
+
# Streams stdout/stderr to the terminal by default. With +silent: true+, redirects child stdio
|
|
55
|
+
# to +File::NULL+ (no live output; non-interactive).
|
|
52
56
|
def prepare_template!(rails_root:, env:, silent: true)
|
|
53
57
|
exe = File.join(rails_root, "bin", "rails")
|
|
54
58
|
raise Polyrun::Error, "Provision: missing #{exe}" unless File.executable?(exe)
|
|
55
59
|
|
|
56
60
|
child_env = ENV.to_h.merge(env)
|
|
57
61
|
child_env["RAILS_ENV"] ||= ENV["RAILS_ENV"] || "test"
|
|
58
|
-
|
|
59
|
-
|
|
62
|
+
st, out, err = Polyrun::ProcessStdio.spawn_wait(
|
|
63
|
+
child_env,
|
|
64
|
+
exe,
|
|
65
|
+
"db:prepare",
|
|
66
|
+
chdir: rails_root,
|
|
67
|
+
silent: silent
|
|
68
|
+
)
|
|
60
69
|
unless st.success?
|
|
61
|
-
|
|
62
|
-
msg << "\n--- stderr ---\n#{err}" unless err.to_s.strip.empty?
|
|
63
|
-
# Rails often prints the first migration/SQL error on stdout; stderr may only show InFailedSqlTransaction.
|
|
64
|
-
msg << "\n--- stdout ---\n#{rails_out}" unless rails_out.to_s.strip.empty?
|
|
65
|
-
raise Polyrun::Error, msg
|
|
70
|
+
raise Polyrun::Error, Polyrun::ProcessStdio.format_failure_message("db:prepare", st, out, err)
|
|
66
71
|
end
|
|
67
72
|
|
|
68
73
|
true
|
|
@@ -1,3 +1,5 @@
|
|
|
1
|
+
require_relative "timing_keys"
|
|
2
|
+
|
|
1
3
|
module Polyrun
|
|
2
4
|
module Partition
|
|
3
5
|
# Hard constraints for plan assignment (spec_queue.md): pins, serial globs.
|
|
@@ -31,21 +33,30 @@ module Polyrun
|
|
|
31
33
|
end
|
|
32
34
|
|
|
33
35
|
# Returns Integer shard index if constrained, or nil if free to place by LPT/HRW.
|
|
36
|
+
# For +path:line+ items (example granularity), also matches pins/globs against the file path only.
|
|
34
37
|
def forced_shard_for(path)
|
|
35
38
|
rel = path.to_s
|
|
36
39
|
abs = File.expand_path(rel, @root)
|
|
40
|
+
variants = [rel, abs]
|
|
41
|
+
if (fp = TimingKeys.file_part_for_constraint(rel))
|
|
42
|
+
variants << fp
|
|
43
|
+
variants << File.expand_path(fp, @root)
|
|
44
|
+
end
|
|
45
|
+
variants.uniq!
|
|
37
46
|
|
|
38
47
|
@pin_map.each do |pattern, shard|
|
|
39
48
|
next if pattern.to_s.empty?
|
|
40
49
|
|
|
41
|
-
|
|
42
|
-
|
|
50
|
+
variants.each do |rel_i|
|
|
51
|
+
abs_i = File.expand_path(rel_i, @root)
|
|
52
|
+
return shard if match_pattern?(pattern.to_s, rel_i, abs_i)
|
|
43
53
|
end
|
|
44
54
|
end
|
|
45
55
|
|
|
46
56
|
@serial_globs.each do |g|
|
|
47
|
-
|
|
48
|
-
|
|
57
|
+
variants.each do |rel_i|
|
|
58
|
+
abs_i = File.expand_path(rel_i, @root)
|
|
59
|
+
return @serial_shard if match_pattern?(g, rel_i, abs_i)
|
|
49
60
|
end
|
|
50
61
|
end
|
|
51
62
|
|
|
@@ -8,9 +8,45 @@ module Polyrun
|
|
|
8
8
|
File.read(File.expand_path(path.to_s, Dir.pwd)).split("\n").map(&:strip).reject(&:empty?)
|
|
9
9
|
end
|
|
10
10
|
|
|
11
|
+
# Prefer +spec/+ RSpec files, then +test/+ Minitest, then Polyrun Quick files (same globs as +polyrun quick+).
|
|
12
|
+
# Order avoids running the broader Quick glob when RSpec or Minitest files already exist.
|
|
13
|
+
def detect_auto_suite(cwd = Dir.pwd)
|
|
14
|
+
base = File.expand_path(cwd)
|
|
15
|
+
return :rspec if Dir.glob(File.join(base, "spec/**/*_spec.rb")).any?
|
|
16
|
+
|
|
17
|
+
return :minitest if Dir.glob(File.join(base, "test/**/*_test.rb")).any?
|
|
18
|
+
|
|
19
|
+
quick = quick_parallel_default_paths(base)
|
|
20
|
+
return :quick if quick.any?
|
|
21
|
+
|
|
22
|
+
nil
|
|
23
|
+
end
|
|
24
|
+
|
|
25
|
+
# Infer parallel suite from explicit paths (+_spec.rb+ vs +_test.rb+ vs Polyrun quick-style +.rb+).
|
|
26
|
+
# Returns +:rspec+, +:minitest+, +:quick+, +:invalid+ (mixed spec and test), or +nil+ (empty).
|
|
27
|
+
def infer_suite_from_paths(paths)
|
|
28
|
+
paths = paths.map { |p| File.expand_path(p) }
|
|
29
|
+
return nil if paths.empty?
|
|
30
|
+
|
|
31
|
+
specs = paths.count { |p| File.basename(p).end_with?("_spec.rb") }
|
|
32
|
+
tests = paths.count { |p| File.basename(p).end_with?("_test.rb") }
|
|
33
|
+
return :invalid if specs.positive? && tests.positive?
|
|
34
|
+
|
|
35
|
+
return :rspec if specs.positive?
|
|
36
|
+
return :minitest if tests.positive?
|
|
37
|
+
|
|
38
|
+
others = paths.size - specs - tests
|
|
39
|
+
return :quick if others.positive?
|
|
40
|
+
|
|
41
|
+
nil
|
|
42
|
+
end
|
|
43
|
+
|
|
11
44
|
# When +paths_file+ is set but missing, returns +{ error: "..." }+.
|
|
12
45
|
# Otherwise returns +{ items:, source: }+ (human-readable source label).
|
|
13
|
-
|
|
46
|
+
#
|
|
47
|
+
# +partition.suite+ (optional): +auto+ (default), +rspec+, +minitest+, +quick+ — used only when resolving
|
|
48
|
+
# from globs (no explicit +paths_file+ and no +spec/spec_paths.txt+).
|
|
49
|
+
def resolve_run_shard_items(paths_file: nil, cwd: Dir.pwd, partition: {})
|
|
14
50
|
if paths_file
|
|
15
51
|
abs = File.expand_path(paths_file.to_s, cwd)
|
|
16
52
|
unless File.file?(abs)
|
|
@@ -20,9 +56,54 @@ module Polyrun
|
|
|
20
56
|
elsif File.file?(File.join(cwd, "spec", "spec_paths.txt"))
|
|
21
57
|
{items: read_lines(File.join(cwd, "spec", "spec_paths.txt")), source: "spec/spec_paths.txt"}
|
|
22
58
|
else
|
|
23
|
-
|
|
59
|
+
resolve_run_shard_items_glob(cwd: cwd, partition: partition)
|
|
24
60
|
end
|
|
25
61
|
end
|
|
62
|
+
|
|
63
|
+
def resolve_run_shard_items_glob(cwd:, partition: {})
|
|
64
|
+
suite = (partition["suite"] || partition[:suite] || "auto").to_s.downcase
|
|
65
|
+
suite = "auto" if suite.empty?
|
|
66
|
+
|
|
67
|
+
base = File.expand_path(cwd)
|
|
68
|
+
spec = Dir.glob(File.join(base, "spec/**/*_spec.rb")).sort
|
|
69
|
+
test = Dir.glob(File.join(base, "test/**/*_test.rb")).sort
|
|
70
|
+
quick = quick_parallel_default_paths(base)
|
|
71
|
+
|
|
72
|
+
case suite
|
|
73
|
+
when "rspec"
|
|
74
|
+
return {error: "partition.suite is rspec but no spec/**/*_spec.rb files"} if spec.empty?
|
|
75
|
+
|
|
76
|
+
{items: spec, source: "spec/**/*_spec.rb glob"}
|
|
77
|
+
when "minitest"
|
|
78
|
+
return {error: "partition.suite is minitest but no test/**/*_test.rb files"} if test.empty?
|
|
79
|
+
|
|
80
|
+
{items: test, source: "test/**/*_test.rb glob"}
|
|
81
|
+
when "quick"
|
|
82
|
+
return {error: "partition.suite is quick but no Polyrun quick files under spec/ or test/"} if quick.empty?
|
|
83
|
+
|
|
84
|
+
{items: quick, source: "Polyrun quick glob"}
|
|
85
|
+
when "auto"
|
|
86
|
+
if spec.any?
|
|
87
|
+
{items: spec, source: "spec/**/*_spec.rb glob"}
|
|
88
|
+
elsif test.any?
|
|
89
|
+
{items: test, source: "test/**/*_test.rb glob"}
|
|
90
|
+
elsif quick.any?
|
|
91
|
+
{items: quick, source: "Polyrun quick glob"}
|
|
92
|
+
else
|
|
93
|
+
{
|
|
94
|
+
error: "no spec paths (spec/spec_paths.txt, partition.paths_file, or spec/**/*_spec.rb); " \
|
|
95
|
+
"no test/**/*_test.rb; no Polyrun quick files"
|
|
96
|
+
}
|
|
97
|
+
end
|
|
98
|
+
else
|
|
99
|
+
{error: "unknown partition.suite: #{suite.inspect} (expected auto, rspec, minitest, quick)"}
|
|
100
|
+
end
|
|
101
|
+
end
|
|
102
|
+
|
|
103
|
+
def quick_parallel_default_paths(base)
|
|
104
|
+
require_relative "../quick/runner"
|
|
105
|
+
Polyrun::Quick::Runner.parallel_default_paths(base)
|
|
106
|
+
end
|
|
26
107
|
end
|
|
27
108
|
end
|
|
28
109
|
end
|
|
@@ -1,41 +1,48 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
1
|
+
require_relative "timing_keys"
|
|
3
2
|
require_relative "constraints"
|
|
4
3
|
require_relative "hrw"
|
|
5
4
|
require_relative "min_heap"
|
|
6
5
|
require_relative "stable_shuffle"
|
|
7
|
-
|
|
8
6
|
module Polyrun
|
|
9
7
|
module Partition
|
|
10
|
-
# Assigns discrete items (e.g. spec paths) to shards (spec_queue.md).
|
|
8
|
+
# Assigns discrete items (e.g. spec paths, or +path:line+ example locators) to shards (spec_queue.md).
|
|
11
9
|
#
|
|
12
10
|
# Strategies:
|
|
13
11
|
# - +round_robin+ — sorted paths, assign by index mod +total_shards+.
|
|
14
12
|
# - +random_round_robin+ — Fisher–Yates shuffle (optional +seed+), then same mod assignment.
|
|
15
|
-
# - +cost_binpack+ (+cost+, +binpack+, +timing+) — LPT greedy binpack using per-
|
|
13
|
+
# - +cost_binpack+ (+cost+, +binpack+, +timing+) — LPT greedy binpack using per-item weights;
|
|
16
14
|
# optional {Constraints} for pins / serial globs before LPT on the rest.
|
|
15
|
+
# Default +timing_granularity+ is +file+ (one weight per spec file). Experimental +:example+
|
|
16
|
+
# uses +path:line+ locators and per-example weights in the timing JSON.
|
|
17
17
|
# - +hrw+ (+rendezvous+) — rendezvous hashing for minimal remapping when m changes; optional constraints.
|
|
18
18
|
class Plan
|
|
19
19
|
COST_STRATEGIES = %w[cost cost_binpack binpack timing].freeze
|
|
20
20
|
HRW_STRATEGIES = %w[hrw rendezvous].freeze
|
|
21
21
|
|
|
22
|
-
attr_reader :items, :total_shards, :strategy, :seed, :constraints
|
|
22
|
+
attr_reader :items, :total_shards, :strategy, :seed, :constraints, :timing_granularity
|
|
23
23
|
|
|
24
|
-
def initialize(items:, total_shards:, strategy: "round_robin", seed: nil, costs: nil, constraints: nil, root: nil)
|
|
25
|
-
@
|
|
24
|
+
def initialize(items:, total_shards:, strategy: "round_robin", seed: nil, costs: nil, constraints: nil, root: nil, timing_granularity: :file)
|
|
25
|
+
@timing_granularity = TimingKeys.normalize_granularity(timing_granularity)
|
|
26
|
+
@root = root ? File.expand_path(root) : Dir.pwd
|
|
27
|
+
@items = items.map do |x|
|
|
28
|
+
if @timing_granularity == :example
|
|
29
|
+
TimingKeys.normalize_locator(x, @root, :example)
|
|
30
|
+
else
|
|
31
|
+
x.to_s.strip
|
|
32
|
+
end
|
|
33
|
+
end.freeze
|
|
26
34
|
@total_shards = Integer(total_shards)
|
|
27
35
|
raise Polyrun::Error, "total_shards must be >= 1" if @total_shards < 1
|
|
28
36
|
|
|
29
37
|
@strategy = strategy.to_s
|
|
30
38
|
@seed = seed
|
|
31
|
-
@root = root ? File.expand_path(root) : Dir.pwd
|
|
32
39
|
@constraints = normalize_constraints(constraints)
|
|
33
40
|
@costs = normalize_costs(costs)
|
|
34
41
|
|
|
35
42
|
validate_constraints_strategy_combo!
|
|
36
43
|
if cost_strategy? && (@costs.nil? || @costs.empty?)
|
|
37
44
|
raise Polyrun::Error,
|
|
38
|
-
"strategy #{@strategy} requires a timing map (path => seconds), e.g. merged polyrun_timing.json"
|
|
45
|
+
"strategy #{@strategy} requires a timing map (path => seconds or path:line => seconds), e.g. merged polyrun_timing.json"
|
|
39
46
|
end
|
|
40
47
|
end
|
|
41
48
|
|
|
@@ -85,24 +92,14 @@ module Polyrun
|
|
|
85
92
|
"seed" => seed,
|
|
86
93
|
"paths" => shard(shard_index)
|
|
87
94
|
}
|
|
95
|
+
m["timing_granularity"] = timing_granularity.to_s if timing_granularity == :example
|
|
88
96
|
secs = shard_weight_totals
|
|
89
97
|
m["shard_seconds"] = secs if cost_strategy? || (hrw_strategy? && secs.any? { |x| x > 0 })
|
|
90
98
|
m
|
|
91
99
|
end
|
|
92
100
|
|
|
93
|
-
def self.load_timing_costs(path)
|
|
94
|
-
|
|
95
|
-
return {} unless File.file?(abs)
|
|
96
|
-
|
|
97
|
-
data = JSON.parse(File.read(abs))
|
|
98
|
-
return {} unless data.is_a?(Hash)
|
|
99
|
-
|
|
100
|
-
out = {}
|
|
101
|
-
data.each do |k, v|
|
|
102
|
-
key = File.expand_path(k.to_s, Dir.pwd)
|
|
103
|
-
out[key] = v.to_f
|
|
104
|
-
end
|
|
105
|
-
out
|
|
101
|
+
def self.load_timing_costs(path, granularity: :file, root: nil)
|
|
102
|
+
TimingKeys.load_costs_json_file(path, granularity, root: root)
|
|
106
103
|
end
|
|
107
104
|
|
|
108
105
|
def self.cost_strategy?(name)
|
|
@@ -134,7 +131,12 @@ module Polyrun
|
|
|
134
131
|
|
|
135
132
|
c = {}
|
|
136
133
|
costs.each do |k, v|
|
|
137
|
-
key =
|
|
134
|
+
key =
|
|
135
|
+
if @timing_granularity == :example
|
|
136
|
+
TimingKeys.normalize_locator(k.to_s, @root, :example)
|
|
137
|
+
else
|
|
138
|
+
File.expand_path(k.to_s, @root)
|
|
139
|
+
end
|
|
138
140
|
c[key] = v.to_f
|
|
139
141
|
end
|
|
140
142
|
c
|
|
@@ -161,19 +163,27 @@ module Polyrun
|
|
|
161
163
|
end
|
|
162
164
|
|
|
163
165
|
def weight_for(path)
|
|
164
|
-
|
|
165
|
-
return @costs[
|
|
166
|
+
key = cost_lookup_key(path.to_s)
|
|
167
|
+
return @costs[key] if @costs&.key?(key)
|
|
166
168
|
|
|
167
169
|
default_weight
|
|
168
170
|
end
|
|
169
171
|
|
|
170
172
|
def weight_for_optional(path)
|
|
171
|
-
|
|
172
|
-
return @costs[
|
|
173
|
+
key = cost_lookup_key(path.to_s)
|
|
174
|
+
return @costs[key] if @costs&.key?(key)
|
|
173
175
|
|
|
174
176
|
0.0
|
|
175
177
|
end
|
|
176
178
|
|
|
179
|
+
def cost_lookup_key(path)
|
|
180
|
+
if @timing_granularity == :example
|
|
181
|
+
TimingKeys.normalize_locator(path, @root, :example)
|
|
182
|
+
else
|
|
183
|
+
File.expand_path(path, @root)
|
|
184
|
+
end
|
|
185
|
+
end
|
|
186
|
+
|
|
177
187
|
def cost_shards
|
|
178
188
|
@cost_shards ||= build_lpt_buckets
|
|
179
189
|
end
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
require "json"
|
|
2
|
+
|
|
3
|
+
require_relative "../log"
|
|
4
|
+
|
|
5
|
+
module Polyrun
|
|
6
|
+
module Partition
|
|
7
|
+
# Normalizes partition item keys and timing JSON keys for +file+ vs experimental +example+ granularity.
|
|
8
|
+
#
|
|
9
|
+
# * +file+ — one item per spec file; keys are absolute paths (see {#canonical_file_path}).
|
|
10
|
+
# * +example+ — one item per example (RSpec-style +path:line+); keys are +"#{absolute_path}:#{line}"+.
|
|
11
|
+
module TimingKeys
|
|
12
|
+
module_function
|
|
13
|
+
|
|
14
|
+
# Resolves the parent directory with +File.realpath+ so +/var/...+ and +/private/var/...+ (macOS
|
|
15
|
+
# tmpdirs) and symlink segments map to one key for the same file.
|
|
16
|
+
def canonical_file_path(abs_path)
|
|
17
|
+
dir = File.dirname(abs_path)
|
|
18
|
+
base = File.basename(abs_path)
|
|
19
|
+
File.join(File.realpath(dir), base)
|
|
20
|
+
rescue SystemCallError
|
|
21
|
+
abs_path
|
|
22
|
+
end
|
|
23
|
+
|
|
24
|
+
# @return [:file, :example]
|
|
25
|
+
def normalize_granularity(value)
|
|
26
|
+
case value.to_s.strip.downcase
|
|
27
|
+
when "example", "examples"
|
|
28
|
+
:example
|
|
29
|
+
else
|
|
30
|
+
:file
|
|
31
|
+
end
|
|
32
|
+
end
|
|
33
|
+
|
|
34
|
+
# File path only (for partition constraints) when +item+ is +path:line+.
|
|
35
|
+
def file_part_for_constraint(item)
|
|
36
|
+
s = item.to_s
|
|
37
|
+
m = s.match(/\A(.+):(\d+)\z/)
|
|
38
|
+
return nil unless m && m[2].match?(/\A\d+\z/)
|
|
39
|
+
|
|
40
|
+
m[1]
|
|
41
|
+
end
|
|
42
|
+
|
|
43
|
+
# Normalize a path or +path:line+ locator relative to +root+ for cost maps and +Plan+ items.
|
|
44
|
+
def normalize_locator(raw, root, granularity)
|
|
45
|
+
s = raw.to_s.strip
|
|
46
|
+
return canonical_file_path(File.expand_path(s, root)) if s.empty?
|
|
47
|
+
|
|
48
|
+
if granularity == :example && (m = s.match(/\A(.+):(\d+)\z/)) && m[2].match?(/\A\d+\z/)
|
|
49
|
+
fp = canonical_file_path(File.expand_path(m[1], root))
|
|
50
|
+
return "#{fp}:#{m[2]}"
|
|
51
|
+
end
|
|
52
|
+
|
|
53
|
+
canonical_file_path(File.expand_path(s, root))
|
|
54
|
+
end
|
|
55
|
+
|
|
56
|
+
# Loads merged timing JSON (+path => seconds+ or +path:line => seconds+).
|
|
57
|
+
#
|
|
58
|
+
# @param root [String, nil] directory for normalizing relative keys (default: +Dir.pwd+). Use the
|
|
59
|
+
# same working directory (or pass the same +root+ as {Partition::Plan}'s +root+) as when
|
|
60
|
+
# generating the timing file so keys align.
|
|
61
|
+
def load_costs_json_file(path, granularity, root: nil)
|
|
62
|
+
abs = File.expand_path(path.to_s, Dir.pwd)
|
|
63
|
+
return {} unless File.file?(abs)
|
|
64
|
+
|
|
65
|
+
data = JSON.parse(File.read(abs))
|
|
66
|
+
return {} unless data.is_a?(Hash)
|
|
67
|
+
|
|
68
|
+
g = normalize_granularity(granularity)
|
|
69
|
+
root = File.expand_path(root || Dir.pwd)
|
|
70
|
+
out = {}
|
|
71
|
+
data.each do |k, v|
|
|
72
|
+
key = normalize_locator(k.to_s, root, g)
|
|
73
|
+
fv = v.to_f
|
|
74
|
+
if out.key?(key) && out[key] != fv
|
|
75
|
+
Polyrun::Log.warn(
|
|
76
|
+
"polyrun: timing JSON duplicate key #{key.inspect} after normalize (#{out[key]} vs #{fv}); using #{fv}"
|
|
77
|
+
)
|
|
78
|
+
end
|
|
79
|
+
out[key] = fv
|
|
80
|
+
end
|
|
81
|
+
out
|
|
82
|
+
end
|
|
83
|
+
end
|
|
84
|
+
end
|
|
85
|
+
end
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
require "digest/md5"
|
|
2
2
|
require "fileutils"
|
|
3
|
-
|
|
3
|
+
require_relative "../process_stdio"
|
|
4
4
|
|
|
5
5
|
module Polyrun
|
|
6
6
|
module Prepare
|
|
@@ -41,14 +41,21 @@ module Polyrun
|
|
|
41
41
|
end
|
|
42
42
|
|
|
43
43
|
# Shells out to +bin/rails assets:precompile+ when +rails_root+ contains +bin/rails+.
|
|
44
|
+
# +silent: true+ discards child stdio (+File::NULL+); +silent: false+ inherits the terminal.
|
|
44
45
|
def precompile!(rails_root:, silent: true)
|
|
45
46
|
exe = File.join(rails_root, "bin", "rails")
|
|
46
47
|
raise Polyrun::Error, "Prepare::Assets: no #{exe}" unless File.executable?(exe)
|
|
47
48
|
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
49
|
+
st, out, err = Polyrun::ProcessStdio.spawn_wait(
|
|
50
|
+
nil,
|
|
51
|
+
exe,
|
|
52
|
+
"assets:precompile",
|
|
53
|
+
chdir: rails_root,
|
|
54
|
+
silent: silent
|
|
55
|
+
)
|
|
56
|
+
unless st.success?
|
|
57
|
+
raise Polyrun::Error, Polyrun::ProcessStdio.format_failure_message("assets:precompile", st, out, err)
|
|
58
|
+
end
|
|
52
59
|
|
|
53
60
|
true
|
|
54
61
|
end
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
require "tempfile"
|
|
2
|
+
|
|
3
|
+
module Polyrun
|
|
4
|
+
# Run a subprocess without +Open3+ pipe reader threads (avoids noisy +IOError+s on SIGINT when
|
|
5
|
+
# streams close). By default stdin/stdout/stderr are inherited so output streams live and the
|
|
6
|
+
# child can use the TTY for prompts.
|
|
7
|
+
module ProcessStdio
|
|
8
|
+
MAX_FAILURE_CAPTURE_BYTES = 32_768
|
|
9
|
+
|
|
10
|
+
class << self
|
|
11
|
+
# @param env [Hash, nil] optional environment for the child (only forwarded when a Hash)
|
|
12
|
+
# @param argv [Array<String>] command argv
|
|
13
|
+
# @param silent [Boolean] if true, connect stdin/stdout/stderr to +File::NULL+ (no terminal output;
|
|
14
|
+
# non-interactive). Still no Open3 pipe threads.
|
|
15
|
+
# @return [Process::Status]
|
|
16
|
+
def inherit_stdio_spawn_wait(env, *argv, chdir: nil, silent: false)
|
|
17
|
+
st, = spawn_wait(env, *argv, chdir: chdir, silent: silent)
|
|
18
|
+
st
|
|
19
|
+
end
|
|
20
|
+
|
|
21
|
+
# Like {#inherit_stdio_spawn_wait}, but returns captured stdout/stderr when +silent+ is true.
|
|
22
|
+
# On success those strings are empty (not read). When +silent+ is false, output goes to the TTY
|
|
23
|
+
# and returned captures are empty.
|
|
24
|
+
#
|
|
25
|
+
# @return [Array(Process::Status, String, String)] status, stdout capture, stderr capture
|
|
26
|
+
def spawn_wait(env, *argv, chdir: nil, silent: false)
|
|
27
|
+
args = spawn_argv(env, *argv)
|
|
28
|
+
return spawn_wait_inherit(args, chdir) unless silent
|
|
29
|
+
|
|
30
|
+
spawn_wait_silent(args, chdir)
|
|
31
|
+
end
|
|
32
|
+
|
|
33
|
+
# Builds a diagnostic string for failed subprocesses (used when +silent: true+ hid live output).
|
|
34
|
+
def format_failure_message(label, status, stdout, stderr)
|
|
35
|
+
msg = "#{label} failed (exit #{status.exitstatus})"
|
|
36
|
+
s = stdout.to_s
|
|
37
|
+
e = stderr.to_s
|
|
38
|
+
msg << "\n--- stdout ---\n#{s}" unless s.strip.empty?
|
|
39
|
+
msg << "\n--- stderr ---\n#{e}" unless e.strip.empty?
|
|
40
|
+
msg
|
|
41
|
+
end
|
|
42
|
+
|
|
43
|
+
private
|
|
44
|
+
|
|
45
|
+
def spawn_argv(env, *argv)
|
|
46
|
+
a = []
|
|
47
|
+
a << env if env.is_a?(Hash)
|
|
48
|
+
a.concat(argv)
|
|
49
|
+
a
|
|
50
|
+
end
|
|
51
|
+
|
|
52
|
+
def spawn_wait_inherit(args, chdir)
|
|
53
|
+
opts = {in: :in, out: :out, err: :err}
|
|
54
|
+
opts[:chdir] = chdir if chdir
|
|
55
|
+
pid = Process.spawn(*args, **opts)
|
|
56
|
+
st = Process.wait2(pid).last
|
|
57
|
+
[st, "", ""]
|
|
58
|
+
end
|
|
59
|
+
|
|
60
|
+
def spawn_wait_silent(args, chdir)
|
|
61
|
+
Tempfile.create("polyrun-out") do |tfout|
|
|
62
|
+
Tempfile.create("polyrun-err") do |tferr|
|
|
63
|
+
tfout.close
|
|
64
|
+
tferr.close
|
|
65
|
+
out_path = tfout.path
|
|
66
|
+
err_path = tferr.path
|
|
67
|
+
opts = {in: File::NULL, out: out_path, err: err_path}
|
|
68
|
+
opts[:chdir] = chdir if chdir
|
|
69
|
+
pid = Process.spawn(*args, **opts)
|
|
70
|
+
st = Process.wait2(pid).last
|
|
71
|
+
if st.success?
|
|
72
|
+
[st, "", ""]
|
|
73
|
+
else
|
|
74
|
+
out = File.binread(out_path)
|
|
75
|
+
err = File.binread(err_path)
|
|
76
|
+
[st, truncate_failure_capture(out), truncate_failure_capture(err)]
|
|
77
|
+
end
|
|
78
|
+
end
|
|
79
|
+
end
|
|
80
|
+
end
|
|
81
|
+
|
|
82
|
+
def truncate_failure_capture(bytes)
|
|
83
|
+
s = bytes.to_s
|
|
84
|
+
return s if s.bytesize <= MAX_FAILURE_CAPTURE_BYTES
|
|
85
|
+
|
|
86
|
+
tail = s.byteslice(-MAX_FAILURE_CAPTURE_BYTES, MAX_FAILURE_CAPTURE_BYTES)
|
|
87
|
+
"... (#{s.bytesize} bytes total; showing last #{MAX_FAILURE_CAPTURE_BYTES} bytes)\n" + tail
|
|
88
|
+
end
|
|
89
|
+
end
|
|
90
|
+
end
|
|
91
|
+
end
|
data/lib/polyrun/quick/runner.rb
CHANGED
|
@@ -62,6 +62,30 @@ module Polyrun
|
|
|
62
62
|
new(out: out, err: err, verbose: verbose).run(paths)
|
|
63
63
|
end
|
|
64
64
|
|
|
65
|
+
# Files Polyrun::Quick would run with no explicit paths (excludes normal RSpec/Minitest files).
|
|
66
|
+
def self.parallel_default_paths(cwd = Dir.pwd)
|
|
67
|
+
base = File.expand_path(cwd)
|
|
68
|
+
globs = [
|
|
69
|
+
File.join(base, "spec", "polyrun_quick", "**", "*.rb"),
|
|
70
|
+
File.join(base, "test", "polyrun_quick", "**", "*.rb"),
|
|
71
|
+
File.join(base, "spec", "**", "*.rb"),
|
|
72
|
+
File.join(base, "test", "**", "*.rb")
|
|
73
|
+
]
|
|
74
|
+
globs.flat_map { |g| Dir.glob(g) }.uniq.reject { |p| quick_path_excluded?(p, base) }.sort
|
|
75
|
+
end
|
|
76
|
+
|
|
77
|
+
def self.quick_path_excluded?(path, base)
|
|
78
|
+
rel = Pathname.new(path).relative_path_from(Pathname.new(base)).to_s
|
|
79
|
+
parts = rel.split(File::SEPARATOR)
|
|
80
|
+
bn = File.basename(path)
|
|
81
|
+
return true if bn.end_with?("_spec.rb", "_test.rb")
|
|
82
|
+
return true if %w[spec_helper.rb rails_helper.rb test_helper.rb].include?(bn)
|
|
83
|
+
return true if parts[0] == "spec" && %w[support fixtures factories].include?(parts[1])
|
|
84
|
+
return true if parts[0] == "test" && %w[support fixtures].include?(parts[1])
|
|
85
|
+
|
|
86
|
+
false
|
|
87
|
+
end
|
|
88
|
+
|
|
65
89
|
def initialize(out: $stdout, err: $stderr, verbose: false)
|
|
66
90
|
@out = out
|
|
67
91
|
@err = err
|
|
@@ -153,27 +177,12 @@ module Polyrun
|
|
|
153
177
|
end
|
|
154
178
|
|
|
155
179
|
def default_globs
|
|
156
|
-
|
|
157
|
-
globs = [
|
|
158
|
-
File.join(base, "spec", "polyrun_quick", "**", "*.rb"),
|
|
159
|
-
File.join(base, "test", "polyrun_quick", "**", "*.rb"),
|
|
160
|
-
File.join(base, "spec", "**", "*.rb"),
|
|
161
|
-
File.join(base, "test", "**", "*.rb")
|
|
162
|
-
]
|
|
163
|
-
globs.flat_map { |g| Dir.glob(g) }.uniq.reject { |p| default_quick_exclude?(p, base) }.sort
|
|
180
|
+
Runner.parallel_default_paths(Dir.pwd)
|
|
164
181
|
end
|
|
165
182
|
|
|
166
183
|
# Omit RSpec/Minitest files and common helpers so +polyrun quick+ with no args does not load normal suites.
|
|
167
184
|
def default_quick_exclude?(path, base)
|
|
168
|
-
|
|
169
|
-
parts = rel.split(File::SEPARATOR)
|
|
170
|
-
bn = File.basename(path)
|
|
171
|
-
return true if bn.end_with?("_spec.rb", "_test.rb")
|
|
172
|
-
return true if %w[spec_helper.rb rails_helper.rb test_helper.rb].include?(bn)
|
|
173
|
-
return true if parts[0] == "spec" && %w[support fixtures factories].include?(parts[1])
|
|
174
|
-
return true if parts[0] == "test" && %w[support fixtures].include?(parts[1])
|
|
175
|
-
|
|
176
|
-
false
|
|
185
|
+
Runner.quick_path_excluded?(path, base)
|
|
177
186
|
end
|
|
178
187
|
end
|
|
179
188
|
end
|
data/lib/polyrun/rspec.rb
CHANGED
|
@@ -11,5 +11,24 @@ module Polyrun
|
|
|
11
11
|
Polyrun::Data::ParallelProvisioning.run_suite_hooks!
|
|
12
12
|
end
|
|
13
13
|
end
|
|
14
|
+
|
|
15
|
+
# Experimental: add {Timing::RSpecExampleFormatter} and write per-example JSON (see +timing_granularity: example+).
|
|
16
|
+
# With +output_path:+, that path is used directly (no +ENV+ mutation). Without it, the formatter
|
|
17
|
+
# reads +ENV["POLYRUN_EXAMPLE_TIMING_OUT"]+ or defaults to +polyrun_timing_examples.json+.
|
|
18
|
+
def install_example_timing!(output_path: nil)
|
|
19
|
+
require_relative "timing/rspec_example_formatter"
|
|
20
|
+
fmt =
|
|
21
|
+
if output_path
|
|
22
|
+
op = output_path
|
|
23
|
+
Class.new(Polyrun::Timing::RSpecExampleFormatter) do
|
|
24
|
+
define_method(:timing_output_path) { op }
|
|
25
|
+
end
|
|
26
|
+
else
|
|
27
|
+
Polyrun::Timing::RSpecExampleFormatter
|
|
28
|
+
end
|
|
29
|
+
::RSpec.configure do |config|
|
|
30
|
+
config.add_formatter fmt
|
|
31
|
+
end
|
|
32
|
+
end
|
|
14
33
|
end
|
|
15
34
|
end
|
|
@@ -25,7 +25,7 @@ Adjust `--workers` or use `bin/rspec_parallel` if your repo provides a wrapper.
|
|
|
25
25
|
### Model B — matrix: one shard per job
|
|
26
26
|
|
|
27
27
|
- Matrix sets `POLYRUN_SHARD_INDEX` and `POLYRUN_SHARD_TOTAL` explicitly (many runners do not set `CI_NODE_*` by default).
|
|
28
|
-
- Each job runs `polyrun
|
|
28
|
+
- Each job runs `polyrun ci-shard-run -- …` (e.g. `-- bundle exec rspec` or `ci-shard-rspec`), or `build-paths` + `plan` + your runner manually. Legacy: `bin/polyrun-rspec` or `bin/rspec_ci_shard` patterns.
|
|
29
29
|
- Upload `coverage/polyrun-fragment-<shard>.json` per job; a `merge-coverage` job downloads all fragments and merges.
|
|
30
30
|
|
|
31
31
|
Do not combine Model A and Model B in one workflow without a documented reason (nested parallelism and duplicate merges).
|
|
@@ -1,5 +1,8 @@
|
|
|
1
1
|
# Polyrun — partition contract for CI matrix (one job per POLYRUN_SHARD_INDEX).
|
|
2
|
-
# Each matrix row: set POLYRUN_SHARD_INDEX and POLYRUN_SHARD_TOTAL; run
|
|
2
|
+
# Each matrix row: set POLYRUN_SHARD_INDEX and POLYRUN_SHARD_TOTAL; run:
|
|
3
|
+
# bundle exec polyrun -c polyrun.yml ci-shard-run -- bundle exec rspec
|
|
4
|
+
# (or ci-shard-rspec; or e.g. ci-shard-run -- bundle exec polyrun quick).
|
|
5
|
+
# Equivalent to build-paths, plan --shard/--total, then run that command with this slice's paths.
|
|
3
6
|
# A separate CI job downloads coverage/polyrun-fragment-*.json and runs merge-coverage.
|
|
4
7
|
# Do not use parallel-rspec with multiple workers inside the same matrix row unless you intend nested parallelism.
|
|
5
8
|
# See: docs/SETUP_PROFILE.md
|
data/lib/polyrun/timing/merge.rb
CHANGED
|
@@ -4,7 +4,8 @@ require_relative "../debug"
|
|
|
4
4
|
|
|
5
5
|
module Polyrun
|
|
6
6
|
module Timing
|
|
7
|
-
# Merges per-shard timing JSON files (spec2 §2.4): path => wall seconds (float)
|
|
7
|
+
# Merges per-shard timing JSON files (spec2 §2.4): path => wall seconds (float), or (experimental)
|
|
8
|
+
# +absolute_path:line+ => seconds for per-example timing.
|
|
8
9
|
# Disjoint suites: values merged by taking the maximum per path when duplicates appear.
|
|
9
10
|
module Merge
|
|
10
11
|
module_function
|