ci-queue 0.81.0 → 0.83.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. checksums.yaml +4 -4
  2. data/Gemfile.lock +1 -1
  3. data/README.md +109 -0
  4. data/lib/ci/queue/build_record.rb +22 -10
  5. data/lib/ci/queue/class_resolver.rb +38 -0
  6. data/lib/ci/queue/configuration.rb +62 -1
  7. data/lib/ci/queue/file_loader.rb +101 -0
  8. data/lib/ci/queue/queue_entry.rb +56 -0
  9. data/lib/ci/queue/redis/_entry_helpers.lua +10 -0
  10. data/lib/ci/queue/redis/acknowledge.lua +10 -7
  11. data/lib/ci/queue/redis/base.rb +34 -8
  12. data/lib/ci/queue/redis/build_record.rb +89 -22
  13. data/lib/ci/queue/redis/grind_record.rb +17 -13
  14. data/lib/ci/queue/redis/heartbeat.lua +9 -4
  15. data/lib/ci/queue/redis/monitor.rb +19 -5
  16. data/lib/ci/queue/redis/requeue.lua +19 -11
  17. data/lib/ci/queue/redis/reserve.lua +47 -8
  18. data/lib/ci/queue/redis/reserve_lost.lua +5 -1
  19. data/lib/ci/queue/redis/supervisor.rb +3 -3
  20. data/lib/ci/queue/redis/worker.rb +216 -23
  21. data/lib/ci/queue/redis.rb +0 -1
  22. data/lib/ci/queue/version.rb +1 -1
  23. data/lib/ci/queue.rb +27 -0
  24. data/lib/minitest/queue/build_status_recorder.rb +32 -14
  25. data/lib/minitest/queue/grind_recorder.rb +3 -3
  26. data/lib/minitest/queue/junit_reporter.rb +2 -2
  27. data/lib/minitest/queue/lazy_entry_resolver.rb +55 -0
  28. data/lib/minitest/queue/lazy_test_discovery.rb +169 -0
  29. data/lib/minitest/queue/local_requeue_reporter.rb +11 -0
  30. data/lib/minitest/queue/order_reporter.rb +9 -2
  31. data/lib/minitest/queue/queue_population_strategy.rb +176 -0
  32. data/lib/minitest/queue/runner.rb +117 -27
  33. data/lib/minitest/queue/test_data.rb +14 -1
  34. data/lib/minitest/queue/worker_profile_reporter.rb +77 -0
  35. data/lib/minitest/queue.rb +271 -6
  36. metadata +10 -3
  37. data/lib/ci/queue/redis/key_shortener.rb +0 -53
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 6e62b7e80a81a6ec61a80810479ba63d150b23fabbd89eb29ee0cd9328158fc9
4
- data.tar.gz: ba9f408cb20679dc11476aa6b0207c8ad5cd1922778ee075182d4390f7b90ec8
3
+ metadata.gz: '08152aacde705d2472151b351e690db68a0c067c8930bf9574e1835825ee225b'
4
+ data.tar.gz: 37fc5a7e9174d9188ff39568a0e4a5bf4f1ae0e3bce0f08e4d9d66e2a5a5264b
5
5
  SHA512:
6
- metadata.gz: 426464c8b7ad0a5c5d2652484a24510c522addd20dbbaf7651fe0cb5624226809a7c8013ee3dacf64e1c0580616d1428c6ef070306fa008a4dbf89eb825c59b0
7
- data.tar.gz: '08926dd910a43f73e118880f8929e34844a81502f4cb28f3e9c144f5e584de6db2c9eacc6a5e463f50f0a9fd5ec76ca0b2835880e4dd43339325ac580365a390'
6
+ metadata.gz: 0ca31505b3ca58115b632d1a23e7884e2bbc25ae2360f4228b08192705200303ddecd03220ff86ebb6c6c82b7e580f83f7d86e5f79d10d59cd6949c4b6947d42
7
+ data.tar.gz: f47440362656cc4ddf8ad52a9c49c9289b70f0a64312eb2c0c3f82e6bc9a980e6414a2c03ccb6613508975bf7b64e6b6232d1083fbbfc4c159b6eac0cb522e51
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- ci-queue (0.81.0)
4
+ ci-queue (0.83.0)
5
5
  logger
6
6
 
7
7
  GEM
data/README.md CHANGED
@@ -38,6 +38,93 @@ minitest-queue --queue redis://example.com run -Itest test/**/*_test.rb
38
38
 
39
39
  Additionally you can configure the requeue settings (see main README) with `--max-requeues` and `--requeue-tolerance`.
40
40
 
41
+ #### Lazy loading (opt-in)
42
+
43
+ By default, all test files are loaded upfront before any tests run. Lazy loading changes this
44
+ so that test files are loaded on-demand as each test is dequeued, reducing peak memory usage.
45
+ This is supported only by `minitest-queue` (not `rspec-queue`).
46
+
47
+ ```bash
48
+ minitest-queue --queue redis://example.com --lazy-load run -Itest test/**/*_test.rb
49
+ ```
50
+
51
+ The leader discovers tests from the provided files, streams them to Redis in batches, and
52
+ workers start running tests as soon as the first batch arrives. Each worker only loads the
53
+ test files it actually needs.
54
+
55
+ In lazy-load mode, test files are not loaded at startup. If your test suite requires a boot
56
+ file (e.g., `test/test_helper.rb` for Rails), specify it so all workers load it before
57
+ running tests.
58
+
59
+ **CLI flags:**
60
+
61
+ | Flag | Description |
62
+ |---|---|
63
+ | `--lazy-load` | Enable lazy loading mode |
64
+ | `--lazy-load-stream-batch-size SIZE` | Number of tests per batch streamed to Redis (default: 5000) |
65
+ | `--lazy-load-stream-timeout SECONDS` | Max time for the leader to finish streaming (default: 300s or `--queue-init-timeout`, whichever is larger) |
66
+ | `--test-files FILE` | Read test file paths from FILE (one per line) instead of positional args. Avoids ARG_MAX limits for large suites (36K+ files). |
67
+
68
+ **Environment variables:**
69
+
70
+ | Variable | Description |
71
+ |---|---|
72
+ | `CI_QUEUE_LAZY_LOAD=1` | Enable lazy loading (equivalent to `--lazy-load`) |
73
+ | `CI_QUEUE_LAZY_LOAD_STREAM_BATCH_SIZE=N` | Same as `--lazy-load-stream-batch-size` |
74
+ | `CI_QUEUE_LAZY_LOAD_STREAM_TIMEOUT=N` | Same as `--lazy-load-stream-timeout` |
75
+ | `CI_QUEUE_LAZY_LOAD_TEST_HELPERS=path` | Comma-separated list of helper files to load at startup on all workers (e.g., `test/test_helper.rb`). No CLI equivalent. |
76
+
77
+ Backward-compatible env var aliases: `CI_QUEUE_STREAM_BATCH_SIZE`, `CI_QUEUE_STREAM_TIMEOUT`, `CI_QUEUE_TEST_HELPERS`.
78
+
79
+ When `CI_QUEUE_DEBUG=1` is set, file loading stats are printed at the end of the run.
80
+
81
+ #### Preresolved test names (opt-in)
82
+
83
+ For large test suites, you can pre-compute the full list of test names on a stable branch
84
+ (e.g., `main`) and cache it. On feature branches, ci-queue reads test names from the cache
85
+ instead of loading all test files to discover them. This eliminates the upfront discovery
86
+ cost and implies lazy-load mode for all workers.
87
+
88
+ ```bash
89
+ minitest-queue --queue redis://example.com run \
90
+ --preresolved-tests test_names.txt \
91
+ -I. -Itest
92
+ ```
93
+
94
+ The file format is one test per line: `TestClass#method_name|path/to/test_file.rb`.
95
+ The pipe-delimited file path tells ci-queue which file to load when a worker picks up that test.
96
+ The leader streams entries directly to Redis without loading any test files.
97
+
98
+ **Reconciliation**: The cached test list may become stale when test files change between
99
+ the cache build and the branch build (methods added, removed, or renamed). To handle this,
100
+ pass `--test-files` with a list of changed test files. The leader will discard preresolved
101
+ entries for those files and re-discover their current test methods by loading them:
102
+
103
+ ```bash
104
+ minitest-queue --queue redis://example.com run \
105
+ --preresolved-tests cached_test_names.txt \
106
+ --test-files changed_test_files.txt \
107
+ -I. -Itest
108
+ ```
109
+
110
+ Note: `--test-files` serves double duty. In plain lazy-load mode it provides the list of
111
+ test files to discover. In preresolved mode it acts as the reconciliation set.
112
+
113
+ **Stale entry handling**: Even with reconciliation, some preresolved entries may refer to
114
+ test methods that no longer exist (e.g., a helper file changed the set of dynamically
115
+ generated methods). By default, these cause an error on the worker. To skip them gracefully
116
+ as `Minitest::Skip` instead, set:
117
+
118
+ | Variable | Description |
119
+ |---|---|
120
+ | `CI_QUEUE_SKIP_STALE_TESTS=1` | Report stale preresolved entries as skips instead of errors. No CLI equivalent. |
121
+
122
+ **CLI flags:**
123
+
124
+ | Flag | Description |
125
+ |---|---|
126
+ | `--preresolved-tests FILE` | Read pre-computed test names from FILE. Implies `--lazy-load`. No env var equivalent. |
127
+ | `--test-files FILE` | In preresolved mode: reconciliation set of changed files to re-discover. |
41
128
 
42
129
  If you'd like to centralize the error reporting you can do so with:
43
130
 
@@ -71,6 +158,28 @@ rspec-queue --queue redis://example.com --timeout 600 --report
71
158
 
72
159
  Because of how `ci-queue` executes the examples, `before(:all)` and `after(:all)` hooks are not supported. `rspec-queue` will explicitly reject them.
73
160
 
161
+ ## Releasing a New Version
162
+
163
+ After merging changes to `main`, follow these steps to release and propagate the update:
164
+
165
+ 1. **Bump the version** in `ruby/lib/ci/queue/version.rb`:
166
+
167
+ ```ruby
168
+ VERSION = '0.XX.0'
169
+ ```
170
+
171
+ 2. **Update `Gemfile.lock`** by running `bundle install` in the `ruby/` directory (or manually updating the version string in `Gemfile.lock` if native dependencies prevent `bundle install`).
172
+
173
+ 3. **Commit and merge** the version bump to `main`. ShipIt will automatically publish the gem to RubyGems.
174
+
175
+ 4. **Update dependent apps/zones**: Any application that depends on `ci-queue` (e.g. via its `Gemfile`) needs to pick up the new version by running:
176
+
177
+ ```bash
178
+ bundle update ci-queue
179
+ ```
180
+
181
+ This updates the app's `Gemfile.lock` to reference the new `ci-queue` version. Commit the updated `Gemfile.lock` and deploy.
182
+
74
183
  ## Custom Redis Expiry
75
184
 
76
185
  `ci-queue` expects the Redis server to have an [eviction policy](https://redis.io/docs/manual/eviction/#eviction-policies) of `allkeys-lru`.
@@ -18,18 +18,35 @@ module CI
18
18
  @queue.exhausted?
19
19
  end
20
20
 
21
- def record_error(id, payload, stats: nil)
21
+ def record_error(id, payload, stat_delta: nil)
22
22
  error_reports[id] = payload
23
- record_stats(stats)
23
+ true
24
24
  end
25
25
 
26
- def record_success(id, stats: nil, skip_flaky_record: false, acknowledge: true)
26
+ def record_success(id, skip_flaky_record: false, acknowledge: true)
27
27
  error_reports.delete(id)
28
- record_stats(stats)
28
+ true
29
+ end
30
+
31
+ def record_requeue(id)
32
+ true
33
+ end
34
+
35
+ def record_stats(builds_stats)
36
+ return unless builds_stats
37
+ stats.merge!(builds_stats)
38
+ end
39
+
40
+ def record_stats_delta(delta, pipeline: nil)
41
+ return if delta.nil? || delta.empty?
42
+ delta.each do |stat_name, value|
43
+ next unless value.is_a?(Numeric) || value.to_s.match?(/\A-?\d+\.?\d*\z/)
44
+ stats[stat_name] = (stats[stat_name] || 0).to_f + value.to_f
45
+ end
29
46
  end
30
47
 
31
48
  def fetch_stats(stat_names)
32
- stat_names.zip(stats.values_at(*stat_names).map(&:to_f))
49
+ stat_names.zip(stats.values_at(*stat_names).map(&:to_f)).to_h
33
50
  end
34
51
 
35
52
  def reset_stats(stat_names)
@@ -47,11 +64,6 @@ module CI
47
64
  private
48
65
 
49
66
  attr_reader :stats
50
-
51
- def record_stats(builds_stats)
52
- return unless builds_stats
53
- stats.merge!(builds_stats)
54
- end
55
67
  end
56
68
  end
57
69
  end
@@ -0,0 +1,38 @@
1
+ # frozen_string_literal: true
2
+
3
+ module CI
4
+ module Queue
5
+ module ClassResolver
6
+ def self.resolve(class_name, file_path: nil, loader: nil)
7
+ klass = try_direct_lookup(class_name)
8
+ return klass if klass
9
+
10
+ if file_path && loader
11
+ loader.load_file(file_path)
12
+ klass = try_direct_lookup(class_name)
13
+ return klass if klass
14
+ end
15
+
16
+ raise ClassNotFoundError, "Unable to resolve class #{class_name}"
17
+ end
18
+
19
+ def self.try_direct_lookup(class_name)
20
+ parts = class_name.sub(/\A::/, '').split('::')
21
+ current = Object
22
+
23
+ parts.each do |name|
24
+ return nil unless current.const_defined?(name, false)
25
+
26
+ current = current.const_get(name, false)
27
+ end
28
+
29
+ return nil unless current.is_a?(Class)
30
+
31
+ current
32
+ rescue NameError
33
+ nil
34
+ end
35
+ private_class_method :try_direct_lookup
36
+ end
37
+ end
38
+ end
@@ -6,12 +6,18 @@ module CI
6
6
  attr_accessor :requeue_tolerance, :namespace, :failing_test, :statsd_endpoint
7
7
  attr_accessor :max_test_duration, :max_test_duration_percentile, :track_test_duration
8
8
  attr_accessor :max_test_failed, :redis_ttl, :warnings_file, :debug_log, :max_missed_heartbeat_seconds
9
+ attr_accessor :lazy_load, :lazy_load_stream_batch_size
10
+ attr_writer :lazy_load_streaming_timeout
11
+ attr_accessor :lazy_load_test_helpers
12
+ attr_accessor :skip_stale_tests
9
13
  attr_reader :circuit_breakers
10
14
  attr_writer :seed, :build_id
11
15
  attr_writer :queue_init_timeout, :report_timeout, :inactive_workers_timeout
12
16
 
13
17
  class << self
14
18
  def from_env(env)
19
+ lazy_load_value = env['CI_QUEUE_LAZY_LOAD']
20
+ lazy_load = lazy_load_value && !lazy_load_value.strip.empty? && !%w(0 false).include?(lazy_load_value.strip.downcase)
15
21
  new(
16
22
  build_id: env['CIRCLE_BUILD_URL'] || env['BUILDKITE_BUILD_ID'] || env['TRAVIS_BUILD_ID'] || env['HEROKU_TEST_RUN_ID'] || env['SEMAPHORE_PIPELINE_ID'],
17
23
  worker_id: env['CIRCLE_NODE_INDEX'] || env['BUILDKITE_PARALLEL_JOB'] || env['CI_NODE_INDEX'] || env['SEMAPHORE_JOB_ID'],
@@ -22,6 +28,11 @@ module CI
22
28
  debug_log: env['CI_QUEUE_DEBUG_LOG'],
23
29
  max_requeues: env['CI_QUEUE_MAX_REQUEUES']&.to_i || 0,
24
30
  requeue_tolerance: env['CI_QUEUE_REQUEUE_TOLERANCE']&.to_f || 0,
31
+ lazy_load: lazy_load || false,
32
+ lazy_load_stream_batch_size: (env['CI_QUEUE_LAZY_LOAD_STREAM_BATCH_SIZE'] || env['CI_QUEUE_STREAM_BATCH_SIZE'])&.to_i,
33
+ lazy_load_streaming_timeout: (env['CI_QUEUE_LAZY_LOAD_STREAM_TIMEOUT'] || env['CI_QUEUE_STREAM_TIMEOUT'])&.to_i,
34
+ lazy_load_test_helpers: env['CI_QUEUE_LAZY_LOAD_TEST_HELPERS'] || env['CI_QUEUE_TEST_HELPERS'],
35
+ skip_stale_tests: %w(1 true).include?(env['CI_QUEUE_SKIP_STALE_TESTS']&.strip&.downcase),
25
36
  )
26
37
  end
27
38
 
@@ -46,7 +57,9 @@ module CI
46
57
  grind_count: nil, max_duration: nil, failure_file: nil, max_test_duration: nil,
47
58
  max_test_duration_percentile: 0.5, track_test_duration: false, max_test_failed: nil,
48
59
  queue_init_timeout: nil, redis_ttl: 8 * 60 * 60, report_timeout: nil, inactive_workers_timeout: nil,
49
- export_flaky_tests_file: nil, warnings_file: nil, debug_log: nil, max_missed_heartbeat_seconds: nil)
60
+ export_flaky_tests_file: nil, warnings_file: nil, debug_log: nil, max_missed_heartbeat_seconds: nil,
61
+ lazy_load: false, lazy_load_stream_batch_size: nil, lazy_load_streaming_timeout: nil, lazy_load_test_helpers: nil,
62
+ skip_stale_tests: false)
50
63
  @build_id = build_id
51
64
  @circuit_breakers = [CircuitBreaker::Disabled]
52
65
  @failure_file = failure_file
@@ -73,6 +86,17 @@ module CI
73
86
  @warnings_file = warnings_file
74
87
  @debug_log = debug_log
75
88
  @max_missed_heartbeat_seconds = max_missed_heartbeat_seconds
89
+ @lazy_load = lazy_load
90
+ @lazy_load_stream_batch_size = lazy_load_stream_batch_size || 5_000
91
+ @lazy_load_streaming_timeout = lazy_load_streaming_timeout
92
+ @lazy_load_test_helpers = lazy_load_test_helpers
93
+ @skip_stale_tests = skip_stale_tests
94
+ end
95
+
96
+ def lazy_load_test_helper_paths
97
+ return [] unless @lazy_load_test_helpers
98
+
99
+ @lazy_load_test_helpers.split(',').map(&:strip)
76
100
  end
77
101
 
78
102
  def queue_init_timeout
@@ -83,6 +107,43 @@ module CI
83
107
  @report_timeout || timeout
84
108
  end
85
109
 
110
+ def lazy_load_streaming_timeout
111
+ if @lazy_load_streaming_timeout && @lazy_load_streaming_timeout > 0
112
+ @lazy_load_streaming_timeout
113
+ else
114
+ [queue_init_timeout, 300].max
115
+ end
116
+ end
117
+
118
+ # Backward-compatible aliases for existing callers.
119
+ def stream_batch_size
120
+ lazy_load_stream_batch_size
121
+ end
122
+
123
+ def stream_batch_size=(value)
124
+ self.lazy_load_stream_batch_size = value
125
+ end
126
+
127
+ def streaming_timeout
128
+ lazy_load_streaming_timeout
129
+ end
130
+
131
+ def streaming_timeout=(value)
132
+ self.lazy_load_streaming_timeout = value
133
+ end
134
+
135
+ def test_helpers
136
+ lazy_load_test_helpers
137
+ end
138
+
139
+ def test_helpers=(value)
140
+ self.lazy_load_test_helpers = value
141
+ end
142
+
143
+ def test_helper_paths
144
+ lazy_load_test_helper_paths
145
+ end
146
+
86
147
  def inactive_workers_timeout
87
148
  @inactive_workers_timeout || timeout
88
149
  end
@@ -0,0 +1,101 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'set'
4
+
5
+ module CI
6
+ module Queue
7
+ class FileLoader
8
+ attr_reader :load_stats
9
+
10
+ def initialize
11
+ @loaded_files = Set.new
12
+ @failed_files = {}
13
+ @pid = Process.pid
14
+ @forked = false
15
+ @load_stats = {}
16
+ @loaded_features = nil
17
+ end
18
+
19
+ def load_file(file_path)
20
+ detect_fork!
21
+ expanded = ::File.expand_path(file_path)
22
+ return if @loaded_files.include?(expanded)
23
+
24
+ if (cached_error = @failed_files[expanded])
25
+ raise cached_error
26
+ end
27
+
28
+ start = Process.clock_gettime(Process::CLOCK_MONOTONIC)
29
+ error = nil
30
+
31
+ begin
32
+ required = with_warning_suppression { require expanded }
33
+ if should_force_load_after_fork?(required, expanded)
34
+ with_warning_suppression { load expanded }
35
+ end
36
+ rescue Exception => e
37
+ raise if e.is_a?(SignalException) || e.is_a?(SystemExit)
38
+ error = e
39
+ ensure
40
+ duration = Process.clock_gettime(Process::CLOCK_MONOTONIC) - start
41
+ @load_stats[expanded] = duration
42
+ end
43
+
44
+ if error
45
+ load_error = FileLoadError.new(file_path, error)
46
+ @failed_files[expanded] = load_error
47
+ raise load_error
48
+ end
49
+
50
+ remember_loaded_feature(expanded)
51
+ @loaded_files.add(expanded)
52
+ nil
53
+ end
54
+
55
+ def total_load_time
56
+ load_stats.values.sum
57
+ end
58
+
59
+ def slowest_files(limit = 10)
60
+ load_stats.sort_by { |_, duration| -duration }.take(limit)
61
+ end
62
+
63
+ private
64
+
65
+ def detect_fork!
66
+ return if @pid == Process.pid
67
+
68
+ @pid = Process.pid
69
+ @forked = true
70
+ @loaded_files.clear
71
+ @failed_files.clear
72
+ @load_stats.clear
73
+ @loaded_features = nil
74
+ end
75
+
76
+ def file_in_loaded_features?(file_path)
77
+ loaded_features.include?(::File.expand_path(file_path))
78
+ end
79
+
80
+ def loaded_features
81
+ @loaded_features ||= Set.new($LOADED_FEATURES.map { |loaded| ::File.expand_path(loaded) })
82
+ end
83
+
84
+ def remember_loaded_feature(file_path)
85
+ loaded_features.add(::File.expand_path(file_path))
86
+ end
87
+
88
+ def should_force_load_after_fork?(required, file_path)
89
+ @forked && !required && file_in_loaded_features?(file_path)
90
+ end
91
+
92
+ def with_warning_suppression
93
+ previous = $VERBOSE
94
+ $VERBOSE = nil
95
+ yield
96
+ ensure
97
+ $VERBOSE = previous
98
+ end
99
+ end
100
+ end
101
+ end
@@ -0,0 +1,56 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'base64'
4
+ require 'json'
5
+
6
+ module CI
7
+ module Queue
8
+ module QueueEntry
9
+ DELIMITER = "\t"
10
+ LOAD_ERROR_PREFIX = '__ciq_load_error__:'.freeze
11
+
12
+ def self.test_id(entry)
13
+ pos = entry.index(DELIMITER)
14
+ pos ? entry[0, pos] : entry
15
+ end
16
+
17
+ def self.parse(entry)
18
+ return { test_id: entry, file_path: nil } unless entry.include?(DELIMITER)
19
+
20
+ test_id, file_path = entry.split(DELIMITER, 2)
21
+ file_path = nil if file_path == ""
22
+ { test_id: test_id, file_path: file_path }
23
+ end
24
+
25
+ def self.format(test_id, file_path)
26
+ return test_id if file_path.nil? || file_path == ""
27
+
28
+ "#{test_id}#{DELIMITER}#{file_path}"
29
+ end
30
+
31
+ def self.load_error_payload?(file_path)
32
+ file_path&.start_with?(LOAD_ERROR_PREFIX)
33
+ end
34
+
35
+ def self.encode_load_error(file_path, error)
36
+ original = error.respond_to?(:original_error) ? error.original_error : error
37
+ payload = {
38
+ 'file_path' => file_path,
39
+ 'error_class' => original.class.name,
40
+ 'error_message' => original.message,
41
+ 'backtrace' => original.backtrace,
42
+ }
43
+ "#{LOAD_ERROR_PREFIX}#{Base64.strict_encode64(JSON.dump(payload))}"
44
+ end
45
+
46
+ def self.decode_load_error(file_path)
47
+ return nil unless load_error_payload?(file_path)
48
+
49
+ encoded = file_path.sub(LOAD_ERROR_PREFIX, '')
50
+ JSON.parse(Base64.strict_decode64(encoded))
51
+ rescue ArgumentError, JSON::ParserError
52
+ nil
53
+ end
54
+ end
55
+ end
56
+ end
@@ -0,0 +1,10 @@
1
+ -- AUTOGENERATED FILE DO NOT EDIT DIRECTLY
2
+ local function test_id_from_entry(value, delimiter)
3
+ if delimiter then
4
+ local pos = string.find(value, delimiter, 1, true)
5
+ if pos then
6
+ return string.sub(value, 1, pos - 1)
7
+ end
8
+ end
9
+ return value
10
+ end
@@ -3,16 +3,19 @@ local zset_key = KEYS[1]
3
3
  local processed_key = KEYS[2]
4
4
  local owners_key = KEYS[3]
5
5
  local error_reports_key = KEYS[4]
6
+ local requeued_by_key = KEYS[5]
6
7
 
7
- local test = ARGV[1]
8
- local error = ARGV[2]
9
- local ttl = ARGV[3]
10
- redis.call('zrem', zset_key, test)
11
- redis.call('hdel', owners_key, test) -- Doesn't matter if it was reclaimed by another workers
12
- local acknowledged = redis.call('sadd', processed_key, test) == 1
8
+ local entry = ARGV[1]
9
+ local test_id = ARGV[2]
10
+ local error = ARGV[3]
11
+ local ttl = ARGV[4]
12
+ redis.call('zrem', zset_key, entry)
13
+ redis.call('hdel', owners_key, entry) -- Doesn't matter if it was reclaimed by another worker
14
+ redis.call('hdel', requeued_by_key, entry)
15
+ local acknowledged = redis.call('sadd', processed_key, test_id) == 1
13
16
 
14
17
  if acknowledged and error ~= "" then
15
- redis.call('hset', error_reports_key, test, error)
18
+ redis.call('hset', error_reports_key, test_id, error)
16
19
  redis.call('expire', error_reports_key, ttl)
17
20
  end
18
21
 
@@ -144,19 +144,26 @@ module CI
144
144
  end
145
145
 
146
146
  def to_a
147
- test_ids.reverse.map { |k| index.fetch(k) }
147
+ test_ids.reverse.map do |entry|
148
+ index.fetch(entry) do
149
+ test_id = CI::Queue::QueueEntry.test_id(entry)
150
+ index.fetch(test_id)
151
+ end
152
+ end
148
153
  end
149
154
 
150
155
  def progress
151
- total - size
156
+ progress = total - size
157
+ progress < 0 ? 0 : progress
152
158
  end
153
159
 
154
- def wait_for_master(timeout: 30)
160
+ def wait_for_master(timeout: 30, allow_streaming: false)
155
161
  return true if master?
156
162
  return true if queue_initialized?
163
+ return true if allow_streaming && streaming?
157
164
 
158
165
  (timeout * 10 + 1).to_i.times do
159
- if queue_initialized?
166
+ if queue_initialized? || (allow_streaming && streaming?)
160
167
  return true
161
168
  else
162
169
  sleep 0.1
@@ -177,6 +184,10 @@ module CI
177
184
  end
178
185
  end
179
186
 
187
+ def streaming?
188
+ master_status == 'streaming'
189
+ end
190
+
180
191
  def queue_initializing?
181
192
  master_status == 'setup'
182
193
  end
@@ -214,7 +225,7 @@ module CI
214
225
  end
215
226
 
216
227
  def key(*args)
217
- KeyShortener.key(config.build_id, *args)
228
+ ['build', build_id, *args].join(':')
218
229
  end
219
230
 
220
231
  def build_id
@@ -235,18 +246,31 @@ module CI
235
246
  end
236
247
 
237
248
  def read_script(name)
238
- ::File.read(::File.join(CI::Queue::DEV_SCRIPTS_ROOT, "#{name}.lua"))
249
+ resolve_lua_includes(
250
+ ::File.read(::File.join(CI::Queue::DEV_SCRIPTS_ROOT, "#{name}.lua")),
251
+ CI::Queue::DEV_SCRIPTS_ROOT,
252
+ )
239
253
  rescue SystemCallError
240
- ::File.read(::File.join(CI::Queue::RELEASE_SCRIPTS_ROOT, "#{name}.lua"))
254
+ resolve_lua_includes(
255
+ ::File.read(::File.join(CI::Queue::RELEASE_SCRIPTS_ROOT, "#{name}.lua")),
256
+ CI::Queue::RELEASE_SCRIPTS_ROOT,
257
+ )
258
+ end
259
+
260
+ def resolve_lua_includes(script, root)
261
+ script.gsub(/^-- @include (\S+)$/) do
262
+ ::File.read(::File.join(root, "#{$1}.lua"))
263
+ end
241
264
  end
242
265
 
243
266
  class HeartbeatProcess
244
- def initialize(redis_url, zset_key, processed_key, owners_key, worker_queue_key)
267
+ def initialize(redis_url, zset_key, processed_key, owners_key, worker_queue_key, entry_delimiter:)
245
268
  @redis_url = redis_url
246
269
  @zset_key = zset_key
247
270
  @processed_key = processed_key
248
271
  @owners_key = owners_key
249
272
  @worker_queue_key = worker_queue_key
273
+ @entry_delimiter = entry_delimiter
250
274
  end
251
275
 
252
276
  def boot!
@@ -261,6 +285,7 @@ module CI
261
285
  @processed_key,
262
286
  @owners_key,
263
287
  @worker_queue_key,
288
+ @entry_delimiter,
264
289
  in: child_read,
265
290
  out: child_write,
266
291
  )
@@ -335,6 +360,7 @@ module CI
335
360
  key('processed'),
336
361
  key('owners'),
337
362
  key('worker', worker_id, 'queue'),
363
+ entry_delimiter: CI::Queue::QueueEntry::DELIMITER,
338
364
  )
339
365
  end
340
366