parallel_tests 3.11.0 → 4.2.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 6dc1e8ae73baee388849755336a036d94026c85e097253397df2a47e7332b468
- data.tar.gz: a28ea801be1d14fa7ec47c5793ecfb835881b0d19268b0ac83621b3ab8ebafe8
+ metadata.gz: 1bcc6e9cd8a207f7a7ec8253139040265cf1c1962e5d25629bca37e5cd2b111e
+ data.tar.gz: 7db53c69e2048799c12504a6ce7b56b6b7b4833ef77fdc298ba50c61fe8ed742
  SHA512:
- metadata.gz: 0075ef8c1a7387397f21a7471f96b54bf6548e54a8d2724de47b50610991b95386ddb85c3ccf65b36611dd331fa713219832385ead8f336a8cc6cf9454b36d3e
- data.tar.gz: cf68aa64bb77bc69cb2e49f1c5e83a97e73fb221ed50e9e5f002dad7e87567d5790eee0efad605e57d503c033fa53b9978d2535090cc03ba14f15d28e11d2974
+ metadata.gz: f682ef1d3752cd3893e186879d905954456b51900ce0d4cfd2213f1c226d13fb38d1a3f251dc75bccb8da663273f59ef977e0aa92707f3ede49db381a53c40f5
+ data.tar.gz: 7a3bad92876b0b225f381b60b89e7049f84e9069a89b710749ecad0e6beed6a60952ed3d850dbe90621a731a974b7694b9182bb02a0f5e3e5292ccb2b6226614
data/Readme.md CHANGED
@@ -1,8 +1,7 @@
  # parallel_tests

  [![Gem Version](https://badge.fury.io/rb/parallel_tests.svg)](https://rubygems.org/gems/parallel_tests)
- [![Build Status](https://travis-ci.org/grosser/parallel_tests.svg)](https://travis-ci.org/grosser/parallel_tests/builds)
- [![Build status](https://github.com/grosser/parallel_tests/workflows/windows/badge.svg)](https://github.com/grosser/parallel_tests/actions?query=workflow%3Awindows)
+ [![Build status](https://github.com/grosser/parallel_tests/workflows/test/badge.svg)](https://github.com/grosser/parallel_tests/actions?query=workflow%3Atest)

  Speedup Test::Unit + RSpec + Cucumber + Spinach by running parallel on multiple CPU cores.<br/>
  ParallelTests splits tests into even groups (by number of lines or runtime) and runs each group in a single process with its own database.
@@ -403,6 +402,8 @@ inspired by [pivotal labs](https://blog.pivotal.io/labs/labs/parallelize-your-rs
  - [Joshua Pinter](https://github.com/joshuapinter)
  - [Zach Dennis](https://github.com/zdennis)
  - [Jon Dufresne](https://github.com/jdufresne)
+ - [Eric Kessler](https://github.com/enkessler)
+ - [Adis Osmonov](https://github.com/adis-io)

  [Michael Grosser](http://grosser.it)<br/>
  michael@grosser.it<br/>
@@ -32,9 +32,23 @@ module ParallelTests
  @graceful_shutdown_attempted ||= false
  Kernel.exit if @graceful_shutdown_attempted

- # The Pid class's synchronize method can't be called directly from a trap
- # Using Thread workaround https://github.com/ddollar/foreman/issues/332
- Thread.new { ParallelTests.stop_all_processes }
+ # In a shell, all sub-processes also get an interrupt, so they shut themselves down.
+ # In a background process this does not happen and we need to do it ourselves.
+ # We cannot always send the interrupt since then the sub-processes would get interrupted twice when in foreground
+ # and that messes with interrupt handling.
+ #
+ # (can simulate detached with `(bundle exec parallel_rspec test/a_spec.rb -n 2 &)`)
+ # also the integration test "passes on int signal to child processes" is detached.
+ #
+ # On windows getpgid does not work so we resort to always killing which is the smaller bug.
+ #
+ # The ParallelTests::Pids `synchronize` method can't be called directly from a trap,
+ # using Thread workaround https://github.com/ddollar/foreman/issues/332
+ Thread.new do
+ if Gem.win_platform? || ((child_pid = ParallelTests.pids.all.first) && Process.getpgid(child_pid) != Process.pid)
+ ParallelTests.stop_all_processes
+ end
+ end

  @graceful_shutdown_attempted = true
  end
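The replacement trap handler above rests on two ideas: the real work still happens on a separate `Thread`, because a `Mutex#synchronize` (which `ParallelTests::Pids` uses) cannot be called from trap context, and the children are only killed explicitly when they did not already receive the terminal's interrupt, which is detected by comparing the first child's process group with our own pid. Below is a minimal standalone sketch of that check, using only core `Process`/`Signal` APIs and a throwaway `sleep` child rather than the gem's pid-file bookkeeping; on Windows `Process.getpgid` is not implemented, which is why the gem falls back to always killing there.

```ruby
# Sketch: forward SIGINT to a child only when we appear to be running detached.
child = spawn("sleep", "60")

Signal.trap(:INT) do
  # Blocking calls such as Mutex#synchronize are not allowed inside a trap,
  # hence the Thread workaround (https://github.com/ddollar/foreman/issues/332).
  Thread.new do
    # Foreground: the terminal delivers SIGINT to the whole process group, so the
    # child's pgid equals our pid and signalling it again would interrupt it twice.
    # Detached (e.g. `(parallel_rspec ... &)`): the child sits in another group,
    # so the interrupt has to be forwarded by hand.
    begin
      Process.kill(:INT, child) if Process.getpgid(child) != Process.pid
    rescue Errno::ESRCH
      nil # child already exited
    end
  end
end

Process.wait(child)
```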
@@ -61,20 +75,15 @@ module ParallelTests
  groups = @runner.tests_in_groups(options[:files], num_processes, options)
  groups.reject!(&:empty?)

- test_results = if options[:only_group]
- groups_to_run = options[:only_group].map { |i| groups[i - 1] }.compact
- report_number_of_tests(groups_to_run) unless options[:quiet]
- execute_in_parallel(groups_to_run, groups_to_run.size, options) do |group|
- run_tests(group, groups_to_run.index(group), 1, options)
- end
- else
- report_number_of_tests(groups) unless options[:quiet]
-
- execute_in_parallel(groups, groups.size, options) do |group|
- run_tests(group, groups.index(group), num_processes, options)
- end
+ if options[:only_group]
+ groups = options[:only_group].map { |i| groups[i - 1] }.compact
+ num_processes = 1
  end

+ report_number_of_tests(groups) unless options[:quiet]
+ test_results = execute_in_parallel(groups, groups.size, options) do |group|
+ run_tests(group, groups.index(group), num_processes, options)
+ end
  report_results(test_results, options) unless options[:quiet]
  end
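The rewritten branch above no longer duplicates the execute/report calls for `--only-group`: it filters `groups` down to the requested 1-based indexes, forces `num_processes` to 1, and falls through to the shared code path. The selection itself is plain array indexing; a small illustration with hypothetical file names:

```ruby
# --only-group takes 1-based group numbers; unknown numbers are dropped by compact.
groups     = [["a_spec.rb"], ["b_spec.rb"], ["c_spec.rb"]]
only_group = [1, 3, 9]

only_group.map { |i| groups[i - 1] }.compact
# => [["a_spec.rb"], ["c_spec.rb"]]
```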
@@ -136,12 +145,12 @@ module ParallelTests
  failing_sets = test_results.reject { |r| r[:exit_status] == 0 }
  return if failing_sets.none?

- if options[:verbose] || options[:verbose_rerun_command]
+ if options[:verbose] || options[:verbose_command]
  puts "\n\nTests have failed for a parallel_test group. Use the following command to run the group again:\n\n"
  failing_sets.each do |failing_set|
  command = failing_set[:command]
  command = @runner.command_with_seed(command, failing_set[:seed]) if failing_set[:seed]
- puts Shellwords.shelljoin(command)
+ @runner.print_command(command, failing_set[:env] || {})
  end
  end
  end
@@ -266,8 +275,7 @@ module ParallelTests
  opts.on("--first-is-1", "Use \"1\" as TEST_ENV_NUMBER to not reuse the default test environment") { options[:first_is_1] = true }
  opts.on("--fail-fast", "Stop all groups when one group fails (best used with --test-options '--fail-fast' if supported") { options[:fail_fast] = true }
  opts.on("--verbose", "Print debug output") { options[:verbose] = true }
- opts.on("--verbose-process-command", "Displays only the command that will be executed by each process") { options[:verbose_process_command] = true }
- opts.on("--verbose-rerun-command", "When there are failures, displays the command executed by each process that failed") { options[:verbose_rerun_command] = true }
+ opts.on("--verbose-command", "Displays the command that will be executed by each process and when there are failures displays the command executed by each process that failed") { options[:verbose_command] = true }
  opts.on("--quiet", "Print only tests output") { options[:quiet] = true }
  opts.on("-v", "--version", "Show Version") do
  puts ParallelTests::VERSION
@@ -321,12 +329,12 @@ module ParallelTests
  def extract_file_paths(argv)
  dash_index = argv.rindex("--")
  file_args_at = (dash_index || -1) + 1
- [argv[file_args_at..-1], argv[0...(dash_index || 0)]]
+ [argv[file_args_at..], argv[0...(dash_index || 0)]]
  end

  def extract_test_options(argv)
  dash_index = argv.index("--") || -1
- argv[dash_index + 1..-1]
+ argv[dash_index + 1..]
  end

  def append_test_options(options, argv)
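The `[n..-1]` → `[n..]` changes here, and the matching ones further down, do not alter behaviour: an endless range slices to the end of the array exactly like `-1` did. Ruby has supported this since 2.6, and the gem now requires 2.7 (see the metadata change below), so the shorter form is safe everywhere. For example:

```ruby
argv = ["spec/a_spec.rb", "--", "--seed", "1234"]

argv[2..]    # => ["--seed", "1234"]
argv[2..-1]  # => ["--seed", "1234"]  (old spelling, identical result)
```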
@@ -396,7 +404,7 @@ module ParallelTests
  def simulate_output_for_ci(simulate)
  if simulate
  progress_indicator = Thread.new do
- interval = Float(ENV.fetch('PARALLEL_TEST_HEARTBEAT_INTERVAL', 60))
+ interval = Float(ENV['PARALLEL_TEST_HEARTBEAT_INTERVAL'] || 60)
  loop do
  sleep interval
  print '.'
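The heartbeat interval keeps its behaviour: `PARALLEL_TEST_HEARTBEAT_INTERVAL` is still optional and still defaults to 60 seconds, the lookup is simply written with `||` instead of `ENV.fetch` with a default argument. Roughly:

```ruby
ENV["PARALLEL_TEST_HEARTBEAT_INTERVAL"] = "0.5"
Float(ENV["PARALLEL_TEST_HEARTBEAT_INTERVAL"] || 60)  # => 0.5

ENV.delete("PARALLEL_TEST_HEARTBEAT_INTERVAL")
Float(ENV["PARALLEL_TEST_HEARTBEAT_INTERVAL"] || 60)  # => 60.0
```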
@@ -27,7 +27,7 @@ module ParallelTests
  example_tags = example.tags.map(&:name)
  example_tags = scenario_tags + example_tags
  next unless matches_tags?(example_tags)
- example.rows[1..-1].each do |row|
+ example.rows[1..].each do |row|
  test_line = row.source_line
  next if line_numbers.any? && !line_numbers.include?(test_line)
@@ -52,7 +52,9 @@ module ParallelTests
  feature_tags = feature.tags.map(&:name)

  # We loop on each children of the feature
- feature.tests.each do |test|
+ test_models = feature.tests
+ test_models += feature.rules.flat_map(&:tests) if feature.respond_to?(:rules) # cuke_modeler >= 3.2 supports rules
+ test_models.each do |test|
  # It's a scenario, we add it to the scenario_line_logger
  scenario_line_logger.visit_feature_element(document.path, test, feature_tags, line_numbers: test_lines)
  end
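With this change, scenarios nested under a Gherkin `Rule:` block are also passed to the scenario line logger whenever the installed cuke_modeler exposes `#rules` (3.2 and later); older versions simply skip that branch thanks to the `respond_to?` guard. A rough standalone sketch of the idea follows; it assumes the `cuke_modeler` gem and builds the model from an inline source string, so treat it as an illustration rather than the gem's own code.

```ruby
require "cuke_modeler"

# Collect scenarios both at the feature level and under Rule: blocks.
feature = CukeModeler::Feature.new(<<~GHERKIN)
  Feature: grouping

    Scenario: top level
      Given something

    Rule: nested behaviour

      Scenario: under a rule
        Given something else
GHERKIN

test_models = feature.tests
test_models += feature.rules.flat_map(&:tests) if feature.respond_to?(:rules)
test_models.map(&:name)  # => ["top level", "under a rule"]
```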
@@ -57,7 +57,7 @@ module ParallelTests
  plural = "s" if (word == group) && (number != 1)
  "#{number} #{word}#{plural}"
  end
- "#{sums[0]} (#{sums[1..-1].join(", ")})"
+ "#{sums[0]} (#{sums[1..].join(", ")})"
  end.compact.join("\n")
  end
@@ -38,7 +38,7 @@ module ParallelTests
  # add all files that should run in a multiple isolated processes to their own groups
  group_features_by_size(items_to_group(single_items), groups[0..(isolate_count - 1)])
  # group the non-isolated by size
- group_features_by_size(items_to_group(items), groups[isolate_count..-1])
+ group_features_by_size(items_to_group(items), groups[isolate_count..])
  else
  # add all files that should run in a single non-isolated process to first group
  single_items.each { |item, size| add_to_group(groups.first, item, size) }
@@ -73,7 +73,7 @@ module ParallelTests
  []
  end
  if runtimes.size * 1.5 > tests.size
- puts "Using recorded test runtime"
+ puts "Using recorded test runtime" unless options[:quiet]
  sort_by_runtime(tests, runtimes)
  else
  sort_by_filesize(tests)
@@ -86,24 +86,32 @@ module ParallelTests
  end

  def execute_command(cmd, process_number, num_processes, options)
+ number = test_env_number(process_number, options).to_s
  env = (options[:env] || {}).merge(
- "TEST_ENV_NUMBER" => test_env_number(process_number, options).to_s,
+ "TEST_ENV_NUMBER" => number,
  "PARALLEL_TEST_GROUPS" => num_processes.to_s,
  "PARALLEL_PID_FILE" => ParallelTests.pid_file_path
  )
  cmd = ["nice", *cmd] if options[:nice]

- puts Shellwords.shelljoin(cmd) if report_process_command?(options) && !options[:serialize_stdout]
+ # being able to run with for example `-output foo-$TEST_ENV_NUMBER` worked originally and is convenient
+ cmd.map! { |c| c.gsub("$TEST_ENV_NUMBER", number).gsub("${TEST_ENV_NUMBER}", number) }
+
+ print_command(cmd, env) if report_process_command?(options) && !options[:serialize_stdout]

  execute_command_and_capture_output(env, cmd, options)
  end

- def execute_command_and_capture_output(env, cmd, options)
- pid = nil
+ def print_command(command, env)
+ env_str = ['TEST_ENV_NUMBER', 'PARALLEL_TEST_GROUPS'].map { |e| "#{e}=#{env[e]}" }.join(' ')
+ puts [env_str, Shellwords.shelljoin(command)].compact.join(' ')
+ end

- popen_options = {}
+ def execute_command_and_capture_output(env, cmd, options)
+ popen_options = {} # do not add `pgroup: true`, it will break `binding.irb` inside the test
  popen_options[:err] = [:child, :out] if options[:combine_stderr]

+ pid = nil
  output = IO.popen(env, cmd, popen_options) do |io|
  pid = io.pid
  ParallelTests.pids.add(pid)
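Two things happen in the rewritten `execute_command`: literal `$TEST_ENV_NUMBER` / `${TEST_ENV_NUMBER}` tokens in the command array are substituted up front, so flags such as `--out tmp/results-$TEST_ENV_NUMBER.txt` keep working even though the command is executed without a shell, and the new `print_command` prefixes the printed command with the relevant environment variables (the same helper the failure-rerun output above now calls). A plain-Ruby sketch of both, using a hypothetical rspec invocation:

```ruby
require "shellwords"

number = "2"
env    = { "TEST_ENV_NUMBER" => number, "PARALLEL_TEST_GROUPS" => "4" }
cmd    = ["rspec", "--out", "tmp/results-$TEST_ENV_NUMBER.txt", "spec/a_spec.rb"]

# substitute the convenience tokens before running the command
cmd.map! { |c| c.gsub("$TEST_ENV_NUMBER", number).gsub("${TEST_ENV_NUMBER}", number) }
# => ["rspec", "--out", "tmp/results-2.txt", "spec/a_spec.rb"]

# roughly what print_command emits
env_str = ["TEST_ENV_NUMBER", "PARALLEL_TEST_GROUPS"].map { |e| "#{e}=#{env[e]}" }.join(" ")
puts [env_str, Shellwords.shelljoin(cmd)].join(" ")
# TEST_ENV_NUMBER=2 PARALLEL_TEST_GROUPS=4 rspec --out tmp/results-2.txt spec/a_spec.rb
```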
@@ -115,7 +123,7 @@ module ParallelTests

  output = "#{Shellwords.shelljoin(cmd)}\n#{output}" if report_process_command?(options) && options[:serialize_stdout]

- { stdout: output, exit_status: exitstatus, command: cmd, seed: seed }
+ { env: env, stdout: output, exit_status: exitstatus, command: cmd, seed: seed }
  end

  def find_results(test_output)
@@ -149,8 +157,8 @@ module ParallelTests
  protected

  def executable
- if ENV.include?('PARALLEL_TESTS_EXECUTABLE')
- [ENV['PARALLEL_TESTS_EXECUTABLE']]
+ if (executable = ENV['PARALLEL_TESTS_EXECUTABLE'])
+ [executable]
  else
  determine_executable
  end
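The `executable` lookup keeps its behaviour but folds the presence check and the read into one assignment-in-condition, avoiding the double `ENV` access:

```ruby
ENV["PARALLEL_TESTS_EXECUTABLE"] = "bin/rspec"

if (executable = ENV["PARALLEL_TESTS_EXECUTABLE"])
  [executable]            # => ["bin/rspec"]
else
  :determine_executable   # placeholder for the gem's fallback path
end
```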
@@ -282,7 +290,7 @@ module ParallelTests
  end

  def report_process_command?(options)
- options[:verbose] || options[:verbose_process_command]
+ options[:verbose] || options[:verbose_command]
  end
  end
  end
@@ -1,4 +1,4 @@
  # frozen_string_literal: true
  module ParallelTests
- VERSION = '3.11.0'
+ VERSION = '4.2.0'
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: parallel_tests
  version: !ruby/object:Gem::Version
- version: 3.11.0
+ version: 4.2.0
  platform: ruby
  authors:
  - Michael Grosser
- autorequire:
+ autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-05-27 00:00:00.000000000 Z
+ date: 2023-02-06 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: parallel
@@ -24,7 +24,7 @@ dependencies:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
- description:
+ description:
  email: michael@grosser.it
  executables:
  - parallel_spinach
@@ -68,10 +68,10 @@ licenses:
  - MIT
  metadata:
  bug_tracker_uri: https://github.com/grosser/parallel_tests/issues
- documentation_uri: https://github.com/grosser/parallel_tests/blob/v3.11.0/Readme.md
- source_code_uri: https://github.com/grosser/parallel_tests/tree/v3.11.0
+ documentation_uri: https://github.com/grosser/parallel_tests/blob/v4.2.0/Readme.md
+ source_code_uri: https://github.com/grosser/parallel_tests/tree/v4.2.0
  wiki_uri: https://github.com/grosser/parallel_tests/wiki
- post_install_message:
+ post_install_message:
  rdoc_options: []
  require_paths:
  - lib
@@ -79,15 +79,15 @@ required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 2.5.0
+ version: 2.7.0
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.0.3.1
- signing_key:
+ rubygems_version: 3.3.3
+ signing_key:
  specification_version: 4
  summary: Run Test::Unit / RSpec / Cucumber / Spinach in parallel
  test_files: []