polyrun 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CODE_OF_CONDUCT.md +31 -0
- data/CONTRIBUTING.md +84 -0
- data/LICENSE +21 -0
- data/README.md +140 -0
- data/SECURITY.md +27 -0
- data/bin/polyrun +6 -0
- data/docs/SETUP_PROFILE.md +106 -0
- data/lib/polyrun/cli/coverage_commands.rb +150 -0
- data/lib/polyrun/cli/coverage_merge_io.rb +124 -0
- data/lib/polyrun/cli/database_commands.rb +149 -0
- data/lib/polyrun/cli/env_commands.rb +43 -0
- data/lib/polyrun/cli/helpers.rb +113 -0
- data/lib/polyrun/cli/init_command.rb +99 -0
- data/lib/polyrun/cli/plan_command.rb +134 -0
- data/lib/polyrun/cli/prepare_command.rb +71 -0
- data/lib/polyrun/cli/prepare_recipe.rb +77 -0
- data/lib/polyrun/cli/queue_command.rb +101 -0
- data/lib/polyrun/cli/quick_command.rb +13 -0
- data/lib/polyrun/cli/report_commands.rb +94 -0
- data/lib/polyrun/cli/run_shards_command.rb +88 -0
- data/lib/polyrun/cli/run_shards_plan_boot_phases.rb +91 -0
- data/lib/polyrun/cli/run_shards_plan_options.rb +45 -0
- data/lib/polyrun/cli/run_shards_planning.rb +124 -0
- data/lib/polyrun/cli/run_shards_run.rb +168 -0
- data/lib/polyrun/cli/start_bootstrap.rb +99 -0
- data/lib/polyrun/cli/timing_command.rb +31 -0
- data/lib/polyrun/cli.rb +184 -0
- data/lib/polyrun/config.rb +61 -0
- data/lib/polyrun/coverage/cobertura_zero_lines.rb +32 -0
- data/lib/polyrun/coverage/collector.rb +184 -0
- data/lib/polyrun/coverage/collector_finish.rb +95 -0
- data/lib/polyrun/coverage/filter.rb +22 -0
- data/lib/polyrun/coverage/formatter.rb +115 -0
- data/lib/polyrun/coverage/merge/formatters.rb +181 -0
- data/lib/polyrun/coverage/merge/formatters_html.rb +55 -0
- data/lib/polyrun/coverage/merge.rb +127 -0
- data/lib/polyrun/coverage/merge_fragment_meta.rb +47 -0
- data/lib/polyrun/coverage/merge_merge_two.rb +117 -0
- data/lib/polyrun/coverage/rails.rb +128 -0
- data/lib/polyrun/coverage/reporting.rb +41 -0
- data/lib/polyrun/coverage/result.rb +18 -0
- data/lib/polyrun/coverage/track_files.rb +141 -0
- data/lib/polyrun/data/cached_fixtures.rb +122 -0
- data/lib/polyrun/data/factory_counts.rb +35 -0
- data/lib/polyrun/data/factory_instrumentation.rb +50 -0
- data/lib/polyrun/data/fixtures.rb +68 -0
- data/lib/polyrun/data/parallel_provisioning.rb +93 -0
- data/lib/polyrun/data/snapshot.rb +84 -0
- data/lib/polyrun/database/clone_shards.rb +81 -0
- data/lib/polyrun/database/provision.rb +72 -0
- data/lib/polyrun/database/shard.rb +63 -0
- data/lib/polyrun/database/url_builder/connection/infer.rb +49 -0
- data/lib/polyrun/database/url_builder/connection/url_builders.rb +43 -0
- data/lib/polyrun/database/url_builder/connection.rb +191 -0
- data/lib/polyrun/database/url_builder/template_prepare.rb +21 -0
- data/lib/polyrun/database/url_builder.rb +160 -0
- data/lib/polyrun/debug.rb +81 -0
- data/lib/polyrun/env/ci.rb +65 -0
- data/lib/polyrun/log.rb +70 -0
- data/lib/polyrun/minitest.rb +17 -0
- data/lib/polyrun/partition/constraints.rb +69 -0
- data/lib/polyrun/partition/hrw.rb +33 -0
- data/lib/polyrun/partition/min_heap.rb +64 -0
- data/lib/polyrun/partition/paths.rb +28 -0
- data/lib/polyrun/partition/paths_build.rb +128 -0
- data/lib/polyrun/partition/plan.rb +189 -0
- data/lib/polyrun/partition/plan_lpt.rb +49 -0
- data/lib/polyrun/partition/plan_sharding.rb +48 -0
- data/lib/polyrun/partition/stable_shuffle.rb +18 -0
- data/lib/polyrun/prepare/artifacts.rb +40 -0
- data/lib/polyrun/prepare/assets.rb +57 -0
- data/lib/polyrun/queue/file_store.rb +199 -0
- data/lib/polyrun/queue/file_store_pending.rb +48 -0
- data/lib/polyrun/quick/assertions.rb +32 -0
- data/lib/polyrun/quick/errors.rb +6 -0
- data/lib/polyrun/quick/example_group.rb +66 -0
- data/lib/polyrun/quick/example_runner.rb +93 -0
- data/lib/polyrun/quick/matchers.rb +156 -0
- data/lib/polyrun/quick/reporter.rb +42 -0
- data/lib/polyrun/quick/runner.rb +180 -0
- data/lib/polyrun/quick.rb +1 -0
- data/lib/polyrun/railtie.rb +7 -0
- data/lib/polyrun/reporting/junit.rb +125 -0
- data/lib/polyrun/reporting/junit_emit.rb +58 -0
- data/lib/polyrun/reporting/rspec_junit.rb +39 -0
- data/lib/polyrun/rspec.rb +15 -0
- data/lib/polyrun/templates/POLYRUN.md +45 -0
- data/lib/polyrun/templates/ci_matrix.polyrun.yml +14 -0
- data/lib/polyrun/templates/minimal_gem.polyrun.yml +13 -0
- data/lib/polyrun/templates/rails_prepare.polyrun.yml +31 -0
- data/lib/polyrun/timing/merge.rb +35 -0
- data/lib/polyrun/timing/summary.rb +25 -0
- data/lib/polyrun/version.rb +3 -0
- data/lib/polyrun.rb +58 -0
- data/polyrun.gemspec +37 -0
- data/sig/polyrun/cli.rbs +6 -0
- data/sig/polyrun/config.rbs +20 -0
- data/sig/polyrun/debug.rbs +12 -0
- data/sig/polyrun/log.rbs +12 -0
- data/sig/polyrun/minitest.rbs +5 -0
- data/sig/polyrun/quick.rbs +19 -0
- data/sig/polyrun/rspec.rbs +5 -0
- data/sig/polyrun.rbs +11 -0
- metadata +288 -0
|
require "json"
require "open3"
require "optparse"

require_relative "prepare_recipe"

module Polyrun
  class CLI
    # `polyrun prepare`: runs the configured prepare recipe
    # (default / assets / shell) and, on success, writes an artifact
    # manifest and prints it as JSON.
    module PrepareCommand
      include PrepareRecipe

      private

      # Entry point for `polyrun prepare [--dry-run]`.
      # Returns a process exit code (0 on success).
      def cmd_prepare(argv, config_path)
        dry = false
        OptionParser.new do |opts|
          opts.on("--dry-run", "Print steps only") { dry = true }
        end.parse!(argv)

        cfg = Polyrun::Config.load(path: config_path || ENV["POLYRUN_CONFIG"])
        prep = cfg.prepare
        recipe = prep["recipe"] || prep[:recipe] || "default"
        prep_env = (prep["env"] || prep[:env] || {}).transform_keys(&:to_s).transform_values(&:to_s)
        # Only build a child environment when the config adds variables.
        child_env = prep_env.empty? ? nil : ENV.to_h.merge(prep_env)
        manifest = prepare_build_manifest(recipe, dry, prep_env)

        exit_code = prepare_dispatch_recipe(manifest, prep, recipe, dry, child_env)
        return exit_code unless exit_code.nil?

        prepare_write_artifact_manifest!(manifest, recipe, dry)
      end

      # Records each artifact path (file vs directory), writes the manifest
      # via Polyrun::Prepare::Artifacts, prints the manifest JSON, returns 0.
      def prepare_write_artifact_manifest!(manifest, recipe, dry)
        entries = (manifest["artifacts"] || []).map do |p|
          {"path" => p, "kind" => (File.directory?(p) ? "directory" : "file")}
        end
        manifest["artifact_manifest_path"] = Polyrun::Prepare::Artifacts.write!(root: Dir.pwd, recipe: recipe, entries: entries, dry_run: dry)
        Polyrun::Log.puts JSON.generate(manifest)
        0
      end

      # Seed manifest describing this prepare run; "env" is included only
      # when the config supplied extra variables.
      def prepare_build_manifest(recipe, dry, prep_env)
        manifest = {
          "recipe" => recipe,
          "dry_run" => dry,
          "artifacts" => [],
          "executed" => !dry
        }
        manifest["env"] = prep_env unless prep_env.empty?
        manifest
      end

      # Dispatches to the recipe implementation. Returns nil to continue
      # with manifest writing, or a non-nil exit code to stop early.
      def prepare_dispatch_recipe(manifest, prep, recipe, dry, child_env)
        case recipe
        when "default", nil, ""
          prepare_recipe_default(manifest, recipe)
          nil
        when "assets"
          # Recipes return [manifest, exit_code_or_nil]; only the code is
          # used here (`m` in the original was an unused local — fixed).
          _manifest, err = prepare_recipe_assets(manifest, prep, dry, child_env)
          err
        when "shell"
          _manifest, err = prepare_recipe_shell(manifest, prep, dry, child_env)
          err
        else
          Polyrun::Log.warn "unknown prepare recipe: #{recipe}"
          1
        end
      end
    end
  end
end
require "open3"

module Polyrun
  class CLI
    # Recipe implementations for `polyrun prepare`. Each recipe returns
    # [manifest, exit_code_or_nil]; a nil code means success/continue.
    module PrepareRecipe
      private

      # No-op recipe: announces itself (verbose only) and succeeds.
      def prepare_recipe_default(manifest, _recipe)
        Polyrun::Log.warn "polyrun prepare: default recipe (no side effects)" if @verbose
        [manifest, nil]
      end

      # Resolves the Rails root shared by the assets and shell recipes
      # (was duplicated inline in both — extracted).
      def prepare_rails_root(prep)
        File.expand_path(prep["rails_root"] || prep[:rails_root] || ".", Dir.pwd)
      end

      # Precompiles assets inside rails_root, or runs the configured custom
      # command instead when prepare.command is set. Dry runs only record
      # the action. On success, public/assets is recorded as an artifact.
      def prepare_recipe_assets(manifest, prep, dry, child_env)
        rails_root = prepare_rails_root(prep)
        manifest["rails_root"] = rails_root
        custom = prep["command"] || prep[:command]
        if dry
          manifest["actions"] = [
            custom ? custom.to_s.strip : "bin/rails assets:precompile"
          ]
          manifest["executed"] = false
          return [manifest, nil]
        end
        if custom && !custom.to_s.strip.empty?
          _out, err, st = Open3.capture3(*([child_env].compact + ["sh", "-c", custom.to_s]), chdir: rails_root)
          prepare_log_stderr(err)
          unless st.success?
            Polyrun::Log.warn "polyrun prepare: assets custom command failed (exit #{st.exitstatus})"
            return [manifest, 1]
          end
        else
          Polyrun::Prepare::Assets.precompile!(rails_root: rails_root, silent: !@verbose)
        end
        manifest["artifacts"] = [File.join(rails_root, "public", "assets").to_s]
        [manifest, nil]
      end

      # Collects shell lines from prepare.commands (array) then
      # prepare.command (single), stripped, empties removed.
      def prepare_shell_command_lines(prep)
        command = prep["command"] || prep[:command]
        commands = prep["commands"] || prep[:commands]
        lines = []
        lines.concat(Array(commands).map { |c| c.to_s.strip }.reject(&:empty?)) if commands
        lines << command.to_s.strip if command && !command.to_s.strip.empty?
        lines
      end

      # Runs each configured shell line in order under rails_root; stops at
      # the first failure with exit code 1. Dry runs only record actions.
      def prepare_recipe_shell(manifest, prep, dry, child_env)
        rails_root = prepare_rails_root(prep)
        manifest["rails_root"] = rails_root
        lines = prepare_shell_command_lines(prep)

        if lines.empty?
          Polyrun::Log.warn "polyrun prepare: shell recipe requires prepare.command and/or prepare.commands"
          return [manifest, 1]
        end
        manifest["actions"] = lines
        if dry
          manifest["executed"] = false
          return [manifest, nil]
        end
        lines.each_with_index do |line, i|
          _out, err, st = Open3.capture3(*([child_env].compact + ["sh", "-c", line]), chdir: rails_root)
          prepare_log_stderr(err)
          unless st.success?
            Polyrun::Log.warn "polyrun prepare: shell step #{i + 1} failed (exit #{st.exitstatus})"
            return [manifest, 1]
          end
        end
        [manifest, nil]
      end

      # Forwards a child process's stderr to the log, skipping empty output.
      def prepare_log_stderr(err)
        Polyrun::Log.warn err unless err.to_s.empty?
      end
    end
  end
end
require "json"
require "optparse"

module Polyrun
  class CLI
    module QueueCommand
      private

      # File-backed queue (spec_queue.md): init → claim batches → ack (ledger append-only).
      # Dispatches `polyrun queue <subcommand>`; returns an exit code
      # (0 on success, 2 on usage errors).
      def cmd_queue(argv)
        queue_dir = ".polyrun-queue"
        paths_file = nil
        timing_path = nil
        worker_id = ENV["USER"] || "worker"
        batch_size = 5
        lease = nil

        action = argv.shift
        Polyrun::Debug.log("queue: subcommand=#{action.inspect}")
        case action
        when "init" then queue_cmd_init(argv, queue_dir, paths_file, timing_path)
        when "claim" then queue_cmd_claim(argv, queue_dir, worker_id, batch_size)
        when "ack" then queue_cmd_ack(argv, queue_dir, lease, worker_id)
        when "status" then queue_cmd_status(argv, queue_dir)
        else
          Polyrun::Log.warn "usage: polyrun queue {init|claim|ack|status} [options]"
          2
        end
      end

      # `queue init`: reads the paths file, orders items (heaviest-first
      # when timing data is supplied), and seeds the file store.
      def queue_cmd_init(argv, dir, paths_file, timing_path)
        OptionParser.new do |parser|
          parser.banner = "usage: polyrun queue init --paths-file P [--timing PATH] [--dir DIR]"
          parser.on("--dir PATH") { |v| dir = v }
          parser.on("--paths-file PATH") { |v| paths_file = v }
          parser.on("--timing PATH") { |v| timing_path = v }
        end.parse!(argv)
        if paths_file.nil?
          Polyrun::Log.warn "queue init: need --paths-file"
          return 2
        end
        items = Polyrun::Partition::Paths.read_lines(paths_file)
        costs = timing_path && Polyrun::Partition::Plan.load_timing_costs(File.expand_path(timing_path, Dir.pwd))
        ordered = queue_init_ordered_items(items, costs)
        Polyrun::Queue::FileStore.new(dir).init!(ordered)
        Polyrun::Log.puts JSON.generate({"dir" => File.expand_path(dir), "count" => ordered.size})
        0
      end

      # Heaviest-first (by timing cost, ties broken alphabetically) when
      # costs exist; plain alphabetical order otherwise.
      def queue_init_ordered_items(items, costs)
        return items.sort if costs.nil? || costs.empty?

        default_weight = costs.values.sum / costs.size.to_f
        items.sort_by { |path| [-queue_weight_for(path, costs, default_weight), path] }
      end

      # `queue claim`: leases up to --batch items for a worker and prints
      # the claim record as JSON.
      def queue_cmd_claim(argv, dir, worker, batch)
        OptionParser.new do |parser|
          parser.banner = "usage: polyrun queue claim [--dir DIR] [--worker ID] [--batch N]"
          parser.on("--dir PATH") { |v| dir = v }
          parser.on("--worker ID") { |v| worker = v }
          parser.on("--batch N", Integer) { |v| batch = v }
        end.parse!(argv)
        claimed = Polyrun::Queue::FileStore.new(dir).claim!(worker_id: worker, batch_size: batch)
        Polyrun::Log.puts JSON.generate(claimed)
        0
      end

      # `queue ack`: acknowledges a lease. Returns 2 when --lease is missing.
      def queue_cmd_ack(argv, dir, lease_id, worker)
        OptionParser.new do |parser|
          parser.banner = "usage: polyrun queue ack --lease ID [--dir DIR] [--worker ID]"
          parser.on("--dir PATH") { |v| dir = v }
          parser.on("--lease ID") { |v| lease_id = v }
          parser.on("--worker ID") { |v| worker = v }
        end.parse!(argv)
        if lease_id.nil?
          Polyrun::Log.warn "queue ack: need --lease"
          return 2
        end
        Polyrun::Queue::FileStore.new(dir).ack!(lease_id: lease_id, worker_id: worker)
        Polyrun::Log.puts "ok"
        0
      end

      # `queue status`: prints the store's status JSON.
      def queue_cmd_status(argv, dir)
        OptionParser.new do |parser|
          parser.on("--dir PATH") { |v| dir = v }
        end.parse!(argv)
        Polyrun::Log.puts JSON.generate(Polyrun::Queue::FileStore.new(dir).status)
        0
      end
    end
  end
end
require "json"
require "optparse"

module Polyrun
  class CLI
    module ReportCommands
      private

      # `report-junit`: converts one RSpec JSON file (or merges several)
      # into a JUnit XML report; prints the written path. Returns 0, or 2
      # on missing/invalid inputs.
      def cmd_report_junit(argv)
        inputs, output = report_junit_parse_inputs(argv)
        inputs.uniq!
        if inputs.empty?
          Polyrun::Log.warn "report-junit: need -i FILE (existing path after glob expansion)"
          return 2
        end

        expanded = inputs.map { |p| File.expand_path(p) }
        return 2 unless report_junit_inputs_exist?(expanded)

        destination = report_junit_resolved_output(expanded, output)
        written =
          if expanded.size == 1
            Polyrun::Reporting::Junit.write_from_json_file(expanded.first, output_path: destination)
          else
            Polyrun::Reporting::Junit.merge_rspec_json_files(expanded, output_path: destination)
          end
        Polyrun::Log.puts written
        0
      end

      # Parses -i/--input (repeatable, glob-expanded) and -o/--output; a
      # bare trailing argument acts as a fallback input pattern.
      def report_junit_parse_inputs(argv)
        inputs = []
        output = nil
        OptionParser.new do |parser|
          parser.banner = "usage: polyrun report-junit -i FILE [-i FILE]... [-o PATH]"
          parser.on("-i", "--input PATH", "RSpec JSON (repeatable; globs ok; multiple files merge examples)") do |pattern|
            inputs.concat(expand_merge_input_pattern(pattern))
          end
          parser.on("-o", "--output PATH", "Default: <dir of first input>/junit.xml") { |v| output = v }
        end.parse!(argv)
        fallback = argv.first
        inputs.concat(expand_merge_input_pattern(fallback)) if inputs.empty? && fallback
        [inputs, output]
      end

      # True when every input is a regular file; otherwise warns about the
      # first missing one and returns false.
      def report_junit_inputs_exist?(inputs)
        missing = inputs.find { |path| !File.file?(path) }
        return true unless missing

        Polyrun::Log.warn "report-junit: not a file: #{missing}"
        false
      end

      # Explicit -o wins; otherwise junit.xml next to the first input.
      def report_junit_resolved_output(inputs, output)
        return File.expand_path(output) if output

        File.join(File.dirname(inputs.first), "junit.xml")
      end

      # `report-timing`: summarizes a merged timing JSON (path => seconds)
      # to stdout or a file. Returns 0, or 2 when the input is missing.
      def cmd_report_timing(argv)
        input = nil
        out_file = nil
        top = 30
        OptionParser.new do |parser|
          parser.banner = "usage: polyrun report-timing -i FILE [-o PATH] [--top N]"
          parser.on("-i", "--input PATH", "Merged polyrun_timing.json (path => seconds)") { |v| input = v }
          parser.on("-o", "--output PATH", "Write summary to file instead of stdout") { |v| out_file = v }
          parser.on("--top N", Integer) { |v| top = v }
        end.parse!(argv)
        input ||= argv.first

        unless input && File.file?(input)
          Polyrun::Log.warn "report-timing: need -i FILE"
          return 2
        end

        timings = JSON.parse(File.read(File.expand_path(input)))
        summary = Polyrun::Timing::Summary.format_slow_files(timings, top: top)
        if out_file
          resolved = File.expand_path(out_file)
          File.write(resolved, summary)
          Polyrun::Log.puts resolved
        else
          Polyrun::Log.print summary
        end
        0
      end
    end
  end
end
require "optparse"
require "rbconfig"

require_relative "start_bootstrap"
require_relative "run_shards_run"

module Polyrun
  class CLI
    module RunShardsCommand
      include StartBootstrap
      include RunShardsRun

      private

      # Default and upper bound for parallel OS processes (POLYRUN_WORKERS / --workers).
      DEFAULT_PARALLEL_WORKERS = 5
      MAX_PARALLEL_WORKERS = 10

      # Spawns N OS processes (not Ruby threads) with POLYRUN_SHARD_INDEX / POLYRUN_SHARD_TOTAL so
      # {Coverage::Collector} writes coverage/polyrun-fragment-<shard>.json. Merge with merge-coverage.
      def cmd_run_shards(argv, config_path)
        run_shards_run!(argv, config_path)
      end

      # Same as run-shards with --merge-coverage; if you omit --, runs `bundle exec rspec`.
      def cmd_parallel_rspec(argv, config_path)
        separator = argv.index("--")
        combined =
          if separator.nil?
            argv + ["--merge-coverage", "--", "bundle", "exec", "rspec"]
          else
            argv[0...separator] + ["--merge-coverage"] + argv[separator..]
          end
        Polyrun::Debug.log_kv(parallel_rspec: "combined argv", argv: combined)
        cmd_run_shards(combined, config_path)
      end

      # Convenience alias: optional legacy script/build_spec_paths.rb (if present and partition.paths_build unset), then parallel-rspec.
      def cmd_start(argv, config_path)
        cfg = Polyrun::Config.load(path: config_path || ENV["POLYRUN_CONFIG"])
        code = start_bootstrap!(cfg, argv, config_path)
        return code if code != 0

        if !skip_build_spec_paths? && !partition_paths_build?(cfg.partition)
          legacy_script = File.expand_path("script/build_spec_paths.rb", Dir.pwd)
          if File.file?(legacy_script)
            # NOTE(review): RUBYOPT is cleared for the child process —
            # presumably to shield the legacy script from inherited loader
            # flags; confirm before changing.
            return 1 unless system({"RUBYOPT" => nil}, RbConfig.ruby, legacy_script)
          end
        end
        cmd_parallel_rspec(argv, config_path)
      end

      # True when the partition config declares a non-empty paths_build hash.
      def partition_paths_build?(partition)
        paths_build = partition["paths_build"] || partition[:paths_build]
        paths_build.is_a?(Hash) && !paths_build.empty?
      end

      # POLYRUN_SKIP_BUILD_SPEC_PATHS=1/true/yes disables the legacy script step.
      def skip_build_spec_paths?
        case ENV["POLYRUN_SKIP_BUILD_SPEC_PATHS"].to_s.downcase
        when "1", "true", "yes" then true
        else false
        end
      end

      # ENV for a worker process: POLYRUN_SHARD_* plus per-shard database URLs from polyrun.yml or DATABASE_URL.
      def shard_child_env(cfg:, workers:, shard:)
        env = ENV.to_h
        env.merge!(Polyrun::Database::Shard.env_map(shard_index: shard, shard_total: workers))
        databases = cfg.databases
        if databases.is_a?(Hash) && !databases.empty?
          env.merge!(Polyrun::Database::UrlBuilder.env_exports_for_databases(databases, shard_index: shard))
        else
          base_url = ENV["DATABASE_URL"]
          if workers > 1 && base_url && !base_url.to_s.strip.empty?
            env["DATABASE_URL"] = Polyrun::Database::Shard.database_url_with_shard(base_url, shard)
          end
        end
        env
      end

      # `build-paths`: applies the configured partition.paths_build step.
      def cmd_build_paths(config_path)
        cfg = Polyrun::Config.load(path: config_path || ENV["POLYRUN_CONFIG"])
        Polyrun::Partition::PathsBuild.apply!(partition: cfg.partition, cwd: Dir.pwd)
      end
    end
  end
end
require "shellwords"

module Polyrun
  class CLI
    # Boot argv, then phase A (options + validate) and B (items + plan) for run-shards.
    module RunShardsPlanBootPhases
      private

      # Phase A: parse options from the pre-`--` argv, run the configured
      # paths build, resolve the default timing path, then validate workers
      # and the command.
      #
      # @return [:fail, Integer] | [:ok, Hash, Array<String>]
      def run_shards_plan_phase_a(head, cmd, pc)
        o = run_shards_plan_options(head, pc)
        # The paths build runs before the timing default is resolved.
        code = Polyrun::Partition::PathsBuild.apply!(partition: pc, cwd: Dir.pwd)
        return [:fail, code] if code != 0

        o[:timing_path] = run_shards_default_timing_path(pc, o[:timing_path], o[:strategy])
        err = run_shards_validate_workers!(o)
        return [:fail, err] if err

        err = run_shards_validate_cmd(cmd)
        return [:fail, err] if err

        # A single argv element containing spaces (e.g. "bundle exec rspec")
        # is shell-split into words.
        cmd = Shellwords.split(cmd.first) if cmd.size == 1 && cmd.first.include?(" ")

        [:ok, o, cmd]
      end

      # Phase B: resolve items and timing costs, build the shard plan, and
      # assemble the run context. Returns [exit_code, nil] on failure or
      # [nil, context_hash] on success.
      def run_shards_plan_phase_b(o, cmd, cfg, pc, run_t0, config_path)
        items, paths_source, err = run_shards_resolve_items(o[:paths_file])
        return [err, nil] if err

        costs, strategy, err = run_shards_resolve_costs(o[:timing_path], o[:strategy])
        return [err, nil] if err

        run_shards_plan_ready_log(o, strategy, cmd, paths_source, items.size)

        constraints = load_partition_constraints(pc, o[:constraints_path])
        plan = run_shards_make_plan(items, o[:workers], strategy, o[:seed], costs, constraints)

        run_shards_debug_shard_sizes(plan, o[:workers])
        Polyrun::Log.warn "polyrun run-shards: #{items.size} paths → #{o[:workers]} workers (#{strategy})" if @verbose

        # More than one worker means parallel execution (extra banner below).
        parallel = o[:workers] > 1
        run_shards_warn_parallel_banner(items.size, o[:workers], strategy) if parallel

        [nil, run_shards_plan_context_hash(o, cmd, cfg, plan, run_t0, parallel, config_path)]
      end

      # Splits argv at `--` into flags (head) and the command to run (cmd),
      # loads the config, and captures a monotonic start time. Returns the
      # short tuple [2, nil] when `--` is missing; otherwise
      # [run_t0, head, cmd, cfg, partition_config].
      def run_shards_plan_boot(argv, config_path)
        run_t0 = Process.clock_gettime(Process::CLOCK_MONOTONIC)
        sep = argv.index("--")
        unless sep
          Polyrun::Log.warn "polyrun run-shards: need -- before the command (e.g. run-shards --workers 5 -- bundle exec rspec)"
          return [2, nil]
        end

        head = argv[0...sep]
        cmd = argv[(sep + 1)..].map(&:to_s)
        cfg = Polyrun::Config.load(path: config_path || ENV["POLYRUN_CONFIG"])
        [run_t0, head, cmd, cfg, cfg.partition]
      end

      # Emits one structured debug record just before partitioning.
      def run_shards_plan_ready_log(o, strategy, cmd, paths_source, item_count)
        Polyrun::Debug.log_kv(
          run_shards: "ready to partition",
          workers: o[:workers],
          strategy: strategy,
          merge_coverage: o[:merge_coverage],
          command: cmd,
          timing_path: o[:timing_path],
          paths_source: paths_source,
          item_count: item_count
        )
      end

      # Bundles everything the run phase needs into a single context hash.
      def run_shards_plan_context_hash(o, cmd, cfg, plan, run_t0, parallel, config_path)
        {
          workers: o[:workers],
          cmd: cmd,
          cfg: cfg,
          plan: plan,
          run_t0: run_t0,
          parallel: parallel,
          merge_coverage: o[:merge_coverage],
          merge_output: o[:merge_output],
          merge_format: o[:merge_format],
          config_path: config_path
        }
      end
    end
  end
end
require "optparse"

module Polyrun
  class CLI
    # Option handling for run-shards: defaults come from the partition
    # config (pc); flags from the pre-`--` argv override them.
    module RunShardsPlanOptions
      private

      # Builds the options state hash for a run-shards invocation.
      def run_shards_plan_options(head, pc)
        state = run_shards_plan_options_state(pc)
        run_shards_plan_options_parse!(head, state)
        state[:paths_file] ||= pc["paths_file"] || pc[:paths_file]
        state
      end

      # Initial defaults prior to flag parsing.
      def run_shards_plan_options_state(pc)
        strategy = (pc["strategy"] || pc[:strategy] || "round_robin").to_s
        {
          workers: env_int("POLYRUN_WORKERS", RunShardsCommand::DEFAULT_PARALLEL_WORKERS),
          paths_file: nil,
          strategy: strategy,
          seed: pc["seed"] || pc[:seed],
          timing_path: nil,
          constraints_path: nil,
          merge_coverage: false,
          merge_output: nil,
          merge_format: nil
        }
      end

      # Mutates `state` in place from the flags found in `head`.
      def run_shards_plan_options_parse!(head, state)
        OptionParser.new do |parser|
          parser.banner = "usage: polyrun run-shards [--workers N] [--strategy NAME] [--paths-file P] [--timing P] [--constraints P] [--seed S] [--merge-coverage] [--merge-output P] [--merge-format LIST] [--] <command> [args...]"
          parser.on("--workers N", Integer) { |v| state[:workers] = v }
          parser.on("--strategy NAME", String) { |v| state[:strategy] = v }
          parser.on("--seed VAL") { |v| state[:seed] = v }
          parser.on("--paths-file PATH", String) { |v| state[:paths_file] = v }
          parser.on("--constraints PATH", String) { |v| state[:constraints_path] = v }
          parser.on("--timing PATH", "merged polyrun_timing.json; implies cost_binpack unless hrw/cost") { |v| state[:timing_path] = v }
          parser.on("--merge-coverage", "After success, merge coverage/polyrun-fragment-*.json (Polyrun coverage must be enabled)") { state[:merge_coverage] = true }
          parser.on("--merge-output PATH", String) { |v| state[:merge_output] = v }
          parser.on("--merge-format LIST", String) { |v| state[:merge_format] = v }
        end.parse!(head)
      end
    end
  end
end