parallel_tests 3.3.0 → 4.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Readme.md +53 -27
- data/bin/parallel_cucumber +2 -1
- data/bin/parallel_rspec +2 -1
- data/bin/parallel_spinach +2 -1
- data/bin/parallel_test +2 -1
- data/lib/parallel_tests/cli.rb +154 -92
- data/lib/parallel_tests/cucumber/failures_logger.rb +1 -1
- data/lib/parallel_tests/cucumber/features_with_steps.rb +4 -3
- data/lib/parallel_tests/cucumber/runner.rb +10 -7
- data/lib/parallel_tests/cucumber/scenario_line_logger.rb +4 -4
- data/lib/parallel_tests/cucumber/scenarios.rb +9 -8
- data/lib/parallel_tests/gherkin/io.rb +2 -3
- data/lib/parallel_tests/gherkin/listener.rb +9 -10
- data/lib/parallel_tests/gherkin/runner.rb +29 -35
- data/lib/parallel_tests/gherkin/runtime_logger.rb +2 -1
- data/lib/parallel_tests/grouper.rb +57 -6
- data/lib/parallel_tests/pids.rb +5 -4
- data/lib/parallel_tests/railtie.rb +1 -0
- data/lib/parallel_tests/rspec/failures_logger.rb +2 -2
- data/lib/parallel_tests/rspec/logger_base.rb +9 -7
- data/lib/parallel_tests/rspec/runner.rb +32 -19
- data/lib/parallel_tests/rspec/runtime_logger.rb +12 -10
- data/lib/parallel_tests/rspec/summary_logger.rb +2 -3
- data/lib/parallel_tests/spinach/runner.rb +6 -2
- data/lib/parallel_tests/tasks.rb +130 -71
- data/lib/parallel_tests/test/runner.rb +90 -41
- data/lib/parallel_tests/test/runtime_logger.rb +19 -14
- data/lib/parallel_tests/version.rb +2 -1
- data/lib/parallel_tests.rb +13 -13
- metadata +10 -10
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1bcc6e9cd8a207f7a7ec8253139040265cf1c1962e5d25629bca37e5cd2b111e
+  data.tar.gz: 7db53c69e2048799c12504a6ce7b56b6b7b4833ef77fdc298ba50c61fe8ed742
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f682ef1d3752cd3893e186879d905954456b51900ce0d4cfd2213f1c226d13fb38d1a3f251dc75bccb8da663273f59ef977e0aa92707f3ede49db381a53c40f5
+  data.tar.gz: 7a3bad92876b0b225f381b60b89e7049f84e9069a89b710749ecad0e6beed6a60952ed3d850dbe90621a731a974b7694b9182bb02a0f5e3e5292ccb2b6226614
data/Readme.md
CHANGED
@@ -1,8 +1,7 @@
 # parallel_tests
 
 [](https://rubygems.org/gems/parallel_tests)
-[](https://github.com/grosser/parallel_tests/actions?query=workflow%3Awindows)
+[](https://github.com/grosser/parallel_tests/actions?query=workflow%3Atest)
 
 Speedup Test::Unit + RSpec + Cucumber + Spinach by running parallel on multiple CPU cores.<br/>
 ParallelTests splits tests into even groups (by number of lines or runtime) and runs each group in a single process with its own database.
@@ -37,9 +36,15 @@ test:
 ### Copy development schema (repeat after migrations)
     rake parallel:prepare
 
+### Run migrations in additional database(s) (repeat after migrations)
+    rake parallel:migrate
+
 ### Setup environment from scratch (create db and loads schema, useful for CI)
     rake parallel:setup
 
+### Drop all test databases
+    rake parallel:drop
+
 ### Run!
     rake parallel:test # Test::Unit
     rake parallel:spec # RSpec
@@ -142,17 +147,19 @@ Add the following to your `.rspec_parallel` (or `.rspec`) :
 RSpec: FailuresLogger
 -----------------------
 
-Produce
-
-E.g.
+Produce pastable command-line snippets for each failed example. For example:
 
-
+```bash
+rspec /path/to/my_spec.rb:123 # should do something
+```
 
-Add
+Add to `.rspec_parallel` or use as CLI flag:
 
     --format progress
     --format ParallelTests::RSpec::FailuresLogger --out tmp/failing_specs.log
 
+(Not needed to retry failures, for that pass [--only-failures](https://relishapp.com/rspec/rspec-core/docs/command-line/only-failures) to rspec)
+
 Cucumber: FailuresLogger
 -----------------------
 
@@ -177,19 +184,24 @@ Setup for non-rails
 
     gem install parallel_tests
     # go to your project dir
-    parallel_test
-    parallel_rspec
-    parallel_cucumber
-    parallel_spinach
+    parallel_test
+    parallel_rspec
+    parallel_cucumber
+    parallel_spinach
+
+- use `ENV['TEST_ENV_NUMBER']` inside your tests to select separate db/memcache/etc. (docker compose: expose it)
 
--
-- Only run selected files & folders:
+- Only run a subset of files / folders:
 
   `parallel_test test/bar test/baz/foo_text.rb`
 
 - Pass test-options and files via `--`:
 
-  `
+  `parallel_rspec -- -t acceptance -f progress -- spec/foo_spec.rb spec/acceptance`
+
+- Pass in test options, by using the -o flag (wrap everything in quotes):
+
+  `parallel_cucumber -n 2 -o '-p foo_profile --tags @only_this_tag or @only_that_tag --format summary'`
 
 Options are:
 <!-- copy output from bundle exec ./bin/parallel_test -h -->
@@ -197,24 +209,32 @@ Options are:
 -p, --pattern [PATTERN] run tests matching this regex pattern
     --exclude-pattern [PATTERN] exclude tests matching this regex pattern
     --group-by [TYPE] group tests by:
-
-
-
-
-
-
+          found - order of finding files
+          steps - number of cucumber/spinach steps
+          scenarios - individual cucumber scenarios
+          filesize - by size of the file
+          runtime - info from runtime log
+          default - runtime when runtime log is filled otherwise filesize
 -m, --multiply-processes [FLOAT] use given number as a multiplier of processes to run
 -s, --single [PATTERN] Run all matching files in the same process
--i, --isolate Do not run any other tests in the group used by --single(-s)
-
-    --
-    --
+-i, --isolate Do not run any other tests in the group used by --single(-s)
+    --isolate-n [PROCESSES] Use 'isolate' singles with number of processes, default: 1.
+    --highest-exit-status Exit with the highest exit status provided by test run(s)
+    --specify-groups [SPECS] Use 'specify-groups' if you want to specify multiple specs running in multiple
+          processes in a specific formation. Commas indicate specs in the same process,
+          pipes indicate specs in a new process. Cannot use with --single, --isolate, or
+          --isolate-n. Ex.
+          $ parallel_tests -n 3 . --specify-groups '1_spec.rb,2_spec.rb|3_spec.rb'
+          Process 1 will contain 1_spec.rb and 2_spec.rb
+          Process 2 will contain 3_spec.rb
+          Process 3 will contain all other specs
+    --only-group INT[,INT]
 -e, --exec [COMMAND] execute this code parallel and with ENV['TEST_ENV_NUMBER']
 -o, --test-options '[OPTIONS]' execute test commands with those options
 -t, --type [TYPE] test(default) / rspec / cucumber / spinach
     --suffix [PATTERN] override built in test file pattern (should match suffix):
-
-
+          '_spec.rb$' - matches rspec files
+          '_(test|spec).rb$' - matches test or spec files
     --serialize-stdout Serialize stdout output, nothing will be written until everything is done
     --prefix-output-with-test-env-number
          Prefixes test env number to the output when not using --serialize-stdout
@@ -282,7 +302,7 @@ TIPS
 `export PARALLEL_TEST_FIRST_IS_1=true` will provide the same result
 - [email_spec and/or action_mailer_cache_delivery](https://github.com/grosser/parallel_tests/wiki)
 - [zeus-parallel_tests](https://github.com/sevos/zeus-parallel_tests)
-- [Distributed
+- [Distributed Parallel Tests on CI systems)](https://github.com/grosser/parallel_tests/wiki/Distributed-Parallel-Tests-on-CI-systems) learn how `parallel_tests` can run on distributed servers such as Travis and GitLab-CI. Also shows you how to use parallel_tests without adding `TEST_ENV_NUMBER`-backends
 - [Capybara setup](https://github.com/grosser/parallel_tests/wiki)
 - [Sphinx setup](https://github.com/grosser/parallel_tests/wiki)
 - [Capistrano setup](https://github.com/grosser/parallel_tests/wiki/Remotely-with-capistrano) let your tests run on a big box instead of your laptop
@@ -378,6 +398,12 @@ inspired by [pivotal labs](https://blog.pivotal.io/labs/labs/parallelize-your-rs
 - [Calaway](https://github.com/calaway)
 - [alboyadjian](https://github.com/alboyadjian)
 - [Nathan Broadbent](https://github.com/ndbroadbent)
+- [Vikram B Kumar](https://github.com/v-kumar)
+- [Joshua Pinter](https://github.com/joshuapinter)
+- [Zach Dennis](https://github.com/zdennis)
+- [Jon Dufresne](https://github.com/jdufresne)
+- [Eric Kessler](https://github.com/enkessler)
+- [Adis Osmonov](https://github.com/adis-io)
 
 [Michael Grosser](http://grosser.it)<br/>
 michael@grosser.it<br/>
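The Readme bullet above about `ENV['TEST_ENV_NUMBER']` is the convention everything else builds on. A minimal, illustrative Ruby sketch of how a test helper typically picks per-process resources — the resource names (`myapp_test`, the log path) are placeholders, not from the gem:

```ruby
# Illustrative only: the first process gets "" (unless --first-is-1 or
# PARALLEL_TEST_FIRST_IS_1 is set), later processes get "2", "3", ...
test_env = ENV['TEST_ENV_NUMBER'].to_s       # "" or "2", "3", ...
database = "myapp_test#{test_env}"           # hypothetical DB name => "myapp_test", "myapp_test2", ...
log_path = "log/test#{test_env}.log"         # hypothetical per-process log file
puts "process #{test_env.empty? ? 1 : test_env} uses #{database} and #{log_path}"
```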
data/bin/parallel_cucumber
CHANGED
@@ -1,7 +1,8 @@
 #!/usr/bin/env ruby
+# frozen_string_literal: true
 
 # enable local usage from cloned repo
-root = File.expand_path(
+root = File.expand_path('..', __dir__)
 $LOAD_PATH << "#{root}/lib" if File.exist?("#{root}/Gemfile")
 
 require "parallel_tests"
data/bin/parallel_rspec
CHANGED
@@ -1,7 +1,8 @@
 #!/usr/bin/env ruby
+# frozen_string_literal: true
 
 # enable local usage from cloned repo
-root = File.expand_path(
+root = File.expand_path('..', __dir__)
 $LOAD_PATH << "#{root}/lib" if File.exist?("#{root}/Gemfile")
 
 require "parallel_tests"
data/bin/parallel_spinach
CHANGED
@@ -1,7 +1,8 @@
 #!/usr/bin/env ruby
+# frozen_string_literal: true
 
 # enable local usage from cloned repo
-root = File.expand_path(
+root = File.expand_path('..', __dir__)
 $LOAD_PATH << "#{root}/lib" if File.exist?("#{root}/Gemfile")
 
 require "parallel_tests"
data/bin/parallel_test
CHANGED
@@ -1,7 +1,8 @@
 #!/usr/bin/env ruby
+# frozen_string_literal: true
 
 # enable local usage from cloned repo
-root = File.expand_path(
+root = File.expand_path('..', __dir__)
 $LOAD_PATH << "#{root}/lib" if File.exist?("#{root}/Gemfile")
 
 require "parallel_tests"
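All four executables above replace a truncated `File.expand_path(` call with `File.expand_path('..', __dir__)`. A small sketch of what that expression evaluates to; the path below is a made-up example, not taken from the gem:

```ruby
# __dir__ is the directory of the current file; passing '..' as the path
# resolves to its parent, i.e. the checked-out repo root when run from bin/.
bin_dir = "/home/dev/parallel_tests/bin"   # hypothetical location of a bin stub
root    = File.expand_path('..', bin_dir)  # => "/home/dev/parallel_tests"
puts root
```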
data/lib/parallel_tests/cli.rb
CHANGED
@@ -1,3 +1,4 @@
+# frozen_string_literal: true
 require 'optparse'
 require 'tempfile'
 require 'parallel_tests'
@@ -14,12 +15,12 @@ module ParallelTests
       ENV['DISABLE_SPRING'] ||= '1'
 
       num_processes = ParallelTests.determine_number_of_processes(options[:count])
-      num_processes
+      num_processes *= (options[:multiply] || 1)
 
       options[:first_is_1] ||= first_is_1?
 
       if options[:execute]
-
+        execute_command_in_parallel(options[:execute], num_processes, options)
       else
         run_tests_in_parallel(num_processes, options)
       end
@@ -31,9 +32,23 @@ module ParallelTests
       @graceful_shutdown_attempted ||= false
       Kernel.exit if @graceful_shutdown_attempted
 
-      #
-      #
-
+      # In a shell, all sub-processes also get an interrupt, so they shut themselves down.
+      # In a background process this does not happen and we need to do it ourselves.
+      # We cannot always send the interrupt since then the sub-processes would get interrupted twice when in foreground
+      # and that messes with interrupt handling.
+      #
+      # (can simulate detached with `(bundle exec parallel_rspec test/a_spec.rb -n 2 &)`)
+      # also the integration test "passes on int signal to child processes" is detached.
+      #
+      # On windows getpgid does not work so we resort to always killing which is the smaller bug.
+      #
+      # The ParallelTests::Pids `synchronize` method can't be called directly from a trap,
+      # using Thread workaround https://github.com/ddollar/foreman/issues/332
+      Thread.new do
+        if Gem.win_platform? || ((child_pid = ParallelTests.pids.all.first) && Process.getpgid(child_pid) != Process.pid)
+          ParallelTests.stop_all_processes
+        end
+      end
 
       @graceful_shutdown_attempted = true
     end
@@ -56,26 +71,21 @@ module ParallelTests
     def run_tests_in_parallel(num_processes, options)
       test_results = nil
 
-      run_tests_proc = ->
+      run_tests_proc = -> do
        groups = @runner.tests_in_groups(options[:files], num_processes, options)
-        groups.reject!
-
-        test_results = if options[:only_group]
-          groups_to_run = options[:only_group].collect{|i| groups[i - 1]}.compact
-          report_number_of_tests(groups_to_run) unless options[:quiet]
-          execute_in_parallel(groups_to_run, groups_to_run.size, options) do |group|
-            run_tests(group, groups_to_run.index(group), 1, options)
-          end
-        else
-          report_number_of_tests(groups) unless options[:quiet]
+        groups.reject!(&:empty?)
 
-
-
-
+        if options[:only_group]
+          groups = options[:only_group].map { |i| groups[i - 1] }.compact
+          num_processes = 1
         end
 
+        report_number_of_tests(groups) unless options[:quiet]
+        test_results = execute_in_parallel(groups, groups.size, options) do |group|
+          run_tests(group, groups.index(group), num_processes, options)
+        end
         report_results(test_results, options) unless options[:quiet]
-
+      end
 
       if options[:quiet]
         run_tests_proc.call
@@ -83,12 +93,23 @@ module ParallelTests
         report_time_taken(&run_tests_proc)
       end
 
-
+      if any_test_failed?(test_results)
+        warn final_fail_message
+
+        # return the highest exit status to allow sub-processes to send things other than 1
+        exit_status = if options[:highest_exit_status]
+          test_results.map { |data| data.fetch(:exit_status) }.max
+        else
+          1
+        end
+
+        exit exit_status
+      end
     end
 
     def run_tests(group, process_number, num_processes, options)
       if group.empty?
-        {:
+        { stdout: '', exit_status: 0, command: nil, seed: nil }
       else
         @runner.run_tests(group, process_number, num_processes, options)
       end
@@ -104,18 +125,16 @@ module ParallelTests
 
     def lock(lockfile)
       File.open(lockfile) do |lock|
-
-
-
-
-
-        lock.flock File::LOCK_UN
-      end
+        lock.flock File::LOCK_EX
+        yield
+      ensure
+        # This shouldn't be necessary, but appears to be
+        lock.flock File::LOCK_UN
       end
     end
 
     def report_results(test_results, options)
-      results = @runner.find_results(test_results.map { |result| result[:stdout] }*"")
+      results = @runner.find_results(test_results.map { |result| result[:stdout] } * "")
       puts ""
       puts @runner.summarize_results(results)
 
@@ -126,13 +145,12 @@ module ParallelTests
       failing_sets = test_results.reject { |r| r[:exit_status] == 0 }
       return if failing_sets.none?
 
-      if options[:verbose] || options[:
+      if options[:verbose] || options[:verbose_command]
        puts "\n\nTests have failed for a parallel_test group. Use the following command to run the group again:\n\n"
         failing_sets.each do |failing_set|
           command = failing_set[:command]
-          command = command.gsub(/;export [A-Z_]+;/, ' ') # remove ugly export statements
           command = @runner.command_with_seed(command, failing_set[:seed]) if failing_set[:seed]
-
+          @runner.print_command(command, failing_set[:env] || {})
         end
       end
     end
@@ -140,20 +158,31 @@ module ParallelTests
     def report_number_of_tests(groups)
       name = @runner.test_file_name
       num_processes = groups.size
-      num_tests = groups.map(&:size).
+      num_tests = groups.map(&:size).sum
       tests_per_process = (num_processes == 0 ? 0 : num_tests / num_processes)
-      puts "#{num_processes}
+      puts "#{pluralize(num_processes, 'process')} for #{pluralize(num_tests, name)}, ~ #{pluralize(tests_per_process, name)} per process"
+    end
+
+    def pluralize(n, singular)
+      if n == 1
+        "1 #{singular}"
+      elsif singular.end_with?('s', 'sh', 'ch', 'x', 'z')
+        "#{n} #{singular}es"
+      else
+        "#{n} #{singular}s"
+      end
     end
 
-    #exit with correct status code so rake parallel:test && echo 123 works
+    # exit with correct status code so rake parallel:test && echo 123 works
     def any_test_failed?(test_results)
       test_results.any? { |result| result[:exit_status] != 0 }
     end
 
     def parse_options!(argv)
+      newline_padding = " " * 37
      options = {}
      OptionParser.new do |opts|
-        opts.banner =
+        opts.banner = <<~BANNER
          Run all tests in parallel, giving each process ENV['TEST_ENV_NUMBER'] ('', '2', '3', ...)
 
          [optional] Only selected files & folders:
@@ -167,61 +196,78 @@ module ParallelTests
        opts.on("-n [PROCESSES]", Integer, "How many processes to use, default: available CPUs") { |n| options[:count] = n }
        opts.on("-p", "--pattern [PATTERN]", "run tests matching this regex pattern") { |pattern| options[:pattern] = /#{pattern}/ }
        opts.on("--exclude-pattern", "--exclude-pattern [PATTERN]", "exclude tests matching this regex pattern") { |pattern| options[:exclude_pattern] = /#{pattern}/ }
-        opts.on(
-          group
-
-
-
-
-
-
+        opts.on(
+          "--group-by [TYPE]",
+          <<~TEXT.rstrip.split("\n").join("\n#{newline_padding}")
+            group tests by:
+            found - order of finding files
+            steps - number of cucumber/spinach steps
+            scenarios - individual cucumber scenarios
+            filesize - by size of the file
+            runtime - info from runtime log
+            default - runtime when runtime log is filled otherwise filesize
          TEXT
-
-        opts.on("-m [FLOAT]", "--multiply-processes [FLOAT]", Float, "use given number as a multiplier of processes to run")
-
-        opts.on("-s [PATTERN]", "--single [PATTERN]",
-          "Run all matching files in the same process") do |pattern|
-
-          options[:single_process] ||= []
-          options[:single_process] << /#{pattern}/
+        ) { |type| options[:group_by] = type.to_sym }
+        opts.on("-m [FLOAT]", "--multiply-processes [FLOAT]", Float, "use given number as a multiplier of processes to run") do |multiply|
+          options[:multiply] = multiply
        end
 
-        opts.on("-
-
+        opts.on("-s [PATTERN]", "--single [PATTERN]", "Run all matching files in the same process") do |pattern|
+          (options[:single_process] ||= []) << /#{pattern}/
+        end
 
+        opts.on("-i", "--isolate", "Do not run any other tests in the group used by --single(-s)") do
          options[:isolate] = true
        end
 
-        opts.on(
+        opts.on(
+          "--isolate-n [PROCESSES]",
          Integer,
-          "Use 'isolate' singles with number of processes, default: 1."
-
+          "Use 'isolate' singles with number of processes, default: 1."
+        ) { |n| options[:isolate_count] = n }
+
+        opts.on("--highest-exit-status", "Exit with the highest exit status provided by test run(s)") do
+          options[:highest_exit_status] = true
        end
 
-        opts.on(
+        opts.on(
+          "--specify-groups [SPECS]",
+          <<~TEXT.rstrip.split("\n").join("\n#{newline_padding}")
+            Use 'specify-groups' if you want to specify multiple specs running in multiple
+            processes in a specific formation. Commas indicate specs in the same process,
+            pipes indicate specs in a new process. Cannot use with --single, --isolate, or
+            --isolate-n. Ex.
+            $ parallel_test -n 3 . --specify-groups '1_spec.rb,2_spec.rb|3_spec.rb'
+              Process 1 will contain 1_spec.rb and 2_spec.rb
+              Process 2 will contain 3_spec.rb
+              Process 3 will contain all other specs
+          TEXT
+        ) { |groups| options[:specify_groups] = groups }
+
+        opts.on("--only-group INT[,INT]", Array) { |groups| options[:only_group] = groups.map(&:to_i) }
 
-        opts.on("-e", "--exec [COMMAND]", "execute this code parallel and with ENV['TEST_ENV_NUMBER']") { |
-        opts.on("-o", "--test-options '[OPTIONS]'", "execute test commands with those options") { |arg| options[:test_options] = arg
+        opts.on("-e", "--exec [COMMAND]", "execute this code parallel and with ENV['TEST_ENV_NUMBER']") { |arg| options[:execute] = Shellwords.shellsplit(arg) }
+        opts.on("-o", "--test-options '[OPTIONS]'", "execute test commands with those options") { |arg| options[:test_options] = Shellwords.shellsplit(arg) }
        opts.on("-t", "--type [TYPE]", "test(default) / rspec / cucumber / spinach") do |type|
-
-
-
-
-          abort
-        end
+          @runner = load_runner(type)
+        rescue NameError, LoadError => e
+          puts "Runner for `#{type}` type has not been found! (#{e})"
+          abort
        end
-        opts.on(
-
-
-
+        opts.on(
+          "--suffix [PATTERN]",
+          <<~TEXT.rstrip.split("\n").join("\n#{newline_padding}")
+            override built in test file pattern (should match suffix):
+            '_spec\.rb$' - matches rspec files
+            '_(test|spec).rb$' - matches test or spec files
          TEXT
-
+        ) { |pattern| options[:suffix] = /#{pattern}/ }
        opts.on("--serialize-stdout", "Serialize stdout output, nothing will be written until everything is done") { options[:serialize_stdout] = true }
        opts.on("--prefix-output-with-test-env-number", "Prefixes test env number to the output when not using --serialize-stdout") { options[:prefix_output_with_test_env_number] = true }
        opts.on("--combine-stderr", "Combine stderr into stdout, useful in conjunction with --serialize-stdout") { options[:combine_stderr] = true }
        opts.on("--non-parallel", "execute same commands but do not in parallel, needs --exec") { options[:non_parallel] = true }
        opts.on("--no-symlinks", "Do not traverse symbolic links to find test files") { options[:symlinks] = false }
-        opts.on('--ignore-tags [PATTERN]', 'When counting steps ignore scenarios with tags that match this pattern')
+        opts.on('--ignore-tags [PATTERN]', 'When counting steps ignore scenarios with tags that match this pattern') { |arg| options[:ignore_tag_pattern] = arg }
        opts.on("--nice", "execute test commands with low priority.") { options[:nice] = true }
        opts.on("--runtime-log [PATH]", "Location of previously recorded test runtimes") { |path| options[:runtime_log] = path }
        opts.on("--allowed-missing [INT]", Integer, "Allowed percentage of missing runtimes (default = 50)") { |percent| options[:allowed_missing_percent] = percent }
@@ -229,16 +275,19 @@ module ParallelTests
        opts.on("--first-is-1", "Use \"1\" as TEST_ENV_NUMBER to not reuse the default test environment") { options[:first_is_1] = true }
        opts.on("--fail-fast", "Stop all groups when one group fails (best used with --test-options '--fail-fast' if supported") { options[:fail_fast] = true }
        opts.on("--verbose", "Print debug output") { options[:verbose] = true }
-        opts.on("--verbose-
-        opts.on("--verbose-rerun-command", "When there are failures, displays the command executed by each process that failed") { options[:verbose_rerun_command] = true }
+        opts.on("--verbose-command", "Displays the command that will be executed by each process and when there are failures displays the command executed by each process that failed") { options[:verbose_command] = true }
        opts.on("--quiet", "Print only tests output") { options[:quiet] = true }
-        opts.on("-v", "--version", "Show Version")
-
+        opts.on("-v", "--version", "Show Version") do
+          puts ParallelTests::VERSION
+          exit 0
+        end
+        opts.on("-h", "--help", "Show this.") do
+          puts opts
+          exit 0
+        end
      end.parse!(argv)
 
-      if options[:verbose] && options[:quiet]
-        raise "Both options are mutually exclusive: verbose & quiet"
-      end
+      raise "Both options are mutually exclusive: verbose & quiet" if options[:verbose] && options[:quiet]
 
      if options[:count] == 0
        options.delete(:count)
@@ -247,7 +296,14 @@ module ParallelTests
 
      files, remaining = extract_file_paths(argv)
      unless options[:execute]
-
+        if files.empty?
+          default_test_folder = @runner.default_test_folder
+          if File.directory?(default_test_folder)
+            files = [default_test_folder]
+          else
+            abort "Pass files or folders to run"
+          end
+        end
        options[:files] = files.map { |file_path| Pathname.new(file_path).cleanpath.to_s }
      end
 
@@ -255,32 +311,38 @@ module ParallelTests
 
      options[:group_by] ||= :filesize if options[:only_group]
 
-
+      if options[:group_by] == :found && options[:single_process]
+        raise "--group-by found and --single-process are not supported"
+      end
      allowed = [:filesize, :runtime, :found]
      if !allowed.include?(options[:group_by]) && options[:only_group]
        raise "--group-by #{allowed.join(" or ")} is required for --only-group"
      end
 
+      if options[:specify_groups] && (options.keys & [:single_process, :isolate, :isolate_count]).any?
+        raise "Can't pass --specify-groups with any of these keys: --single, --isolate, or --isolate-n"
+      end
+
      options
    end
 
    def extract_file_paths(argv)
      dash_index = argv.rindex("--")
      file_args_at = (dash_index || -1) + 1
-      [argv[file_args_at
+      [argv[file_args_at..], argv[0...(dash_index || 0)]]
    end
 
    def extract_test_options(argv)
      dash_index = argv.index("--") || -1
-      argv[dash_index+1
+      argv[dash_index + 1..]
    end
 
    def append_test_options(options, argv)
      new_opts = extract_test_options(argv)
      return if new_opts.empty?
 
-
-      options[:test_options]
+      options[:test_options] ||= []
+      options[:test_options] += new_opts
    end
 
    def load_runner(type)
@@ -290,9 +352,9 @@ module ParallelTests
      klass_name.split('::').inject(Object) { |x, y| x.const_get(y) }
    end
 
-    def
+    def execute_command_in_parallel(command, num_processes, options)
      runs = if options[:only_group]
-        options[:only_group].map{|g| g - 1}
+        options[:only_group].map { |g| g - 1 }
      else
        (0...num_processes).to_a
      end
@@ -311,13 +373,13 @@ module ParallelTests
      abort if results.any? { |r| r[:exit_status] != 0 }
    end
 
-    def report_time_taken
-      seconds = ParallelTests.delta
+    def report_time_taken(&block)
+      seconds = ParallelTests.delta(&block).to_i
      puts "\nTook #{seconds} seconds#{detailed_duration(seconds)}"
    end
 
    def detailed_duration(seconds)
-      parts = [
+      parts = [seconds / 3600, seconds % 3600 / 60, seconds % 60].drop_while(&:zero?)
      return if parts.size < 2
      parts = parts.map { |i| "%02d" % i }.join(':').sub(/^0/, '')
      " (#{parts})"
@@ -342,7 +404,7 @@ module ParallelTests
    def simulate_output_for_ci(simulate)
      if simulate
        progress_indicator = Thread.new do
-          interval = Float(ENV
+          interval = Float(ENV['PARALLEL_TEST_HEARTBEAT_INTERVAL'] || 60)
          loop do
            sleep interval
            print '.'