specwrk 0.7.1 → 0.9.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: 690e5d97390d650309a664688c09c777308ad8a0e8c52ef853fd7893a23d73f3
-   data.tar.gz: b9dfe358291cad4ea4e1a40bebc82305c9d98dc05f2620ee7268c2a60990dbc0
+   metadata.gz: 657cd8e611ea7ea6344dc5bd540892e69d801ea5afd410b1be2e7a6d3f73658e
+   data.tar.gz: c066775a6f57452cd55c49eec7604b8e264881ad085d7847ff176240a9ecbe05
  SHA512:
-   metadata.gz: d5fe7fa3b2cdcf27b542a7a75a49168e7dba16f6c92982d77fd66a3d60776a12f4d5e500a3e34945d18913e7b3f4493906116bed47266f115ddffed29373995e
-   data.tar.gz: b64f866c748808b8e7e5ba788c14a33fbd57f9bf5f8589761ec36fc3745c25cea95bdef291802137530a387577a816128a09dac34db6107133204efe51d34fee
+   metadata.gz: 076a41a98119ccfe0108c76c32341d8cbf6642da3f37f80276a61ed885fbca9ddaf31c597b8462ddeb608c67d7e8787f5cefd606cb4b977aeef715e9a5cdb0bb
+   data.tar.gz: 39891db9a0400196ddaba587e7014aa00e485c65ce37efc997998f854dd98d8cabfef39806657bec15e71c43a29743dda4081b35c2022a917a5503e4e161cea4
@@ -4,4 +4,4 @@ export THRUSTER_HTTP_PORT=${PORT:-5138}
  export THRUSTER_TARGET_PORT=3000
  export THRUSTER_HTTP_IDLE_TIMEOUT=${IDLE_TIMEOUT:-305}

- exec thrust puma --workers 0 --bind tcp://127.0.0.1:3000 --threads ${PUMA_THREADS:-1}
+ exec thrust puma --workers 0 --bind tcp://127.0.0.1:3000 --threads ${PUMA_THREADS:-1} --workers ${PUMA_WORKERS:-0}
data/lib/specwrk/store/file_adapter.rb ADDED
@@ -0,0 +1,180 @@
+ # frozen_string_literal: true
+
+ require "json"
+ require "base64"
+
+ require "specwrk/store"
+
+ module Specwrk
+   class Store
+     class FileAdapter
+       EXT = ".wrk.json"
+
+       THREAD_POOL = Class.new do
+         @work_queue = Queue.new
+
+         @threads = Array.new(ENV.fetch("SPECWRK_SRV_FILE_ADAPTER_THREAD_COUNT", "4").to_i) do
+           Thread.new do
+             loop do
+               @work_queue.pop.call
+             end
+           end
+         end
+
+         class << self
+           def schedule(&blk)
+             @work_queue.push blk
+           end
+         end
+       end
+
+       def initialize(path)
+         @path = path
+         FileUtils.mkdir_p(@path)
+       end
+
+       def [](key)
+         content = read(key.to_s)
+         return unless content
+
+         JSON.parse(content, symbolize_names: true)
+       end
+
+       def []=(key, value)
+         key_string = key.to_s
+         if value.nil?
+           delete(key_string)
+         else
+           filename = filename_for_key(key_string)
+           write(filename, JSON.generate(value))
+           known_key_pairs[key_string] = filename
+         end
+       end
+
+       def keys
+         known_key_pairs.keys
+       end
+
+       def clear
+         FileUtils.rm_rf(@path)
+         FileUtils.mkdir_p(@path)
+
+         @known_key_pairs = nil
+       end
+
+       def delete(*keys)
+         filenames = keys.map { |key| known_key_pairs[key] }.compact
+
+         FileUtils.rm_f(filenames)
+
+         keys.each { |key| known_key_pairs.delete(key) }
+       end
+
+       def merge!(h2)
+         multi_write(h2)
+       end
+
+       def multi_read(*read_keys)
+         known_key_pairs # precache before each thread tries to look them up
+
+         result_queue = Queue.new
+
+         read_keys.each do |key|
+           THREAD_POOL.schedule do
+             result_queue.push([key.to_s, read(key)])
+           end
+         end
+
+         Thread.pass until result_queue.length == read_keys.length
+
+         results = {}
+         until result_queue.empty?
+           result = result_queue.pop
+           next if result.last.nil?
+
+           results[result.first] = JSON.parse(result.last, symbolize_names: true)
+         end
+
+         read_keys.map { |key| [key.to_s, results[key.to_s]] if results.key?(key.to_s) }.compact.to_h # respect order requested in the returned hash
+       end
+
+       def multi_write(hash)
+         known_key_pairs # precache before each thread tries to look them up
+
+         result_queue = Queue.new
+
+         hash_with_filenames = hash.map { |key, value| [key.to_s, [filename_for_key(key.to_s), value]] }.to_h
+         hash_with_filenames.each do |key, (filename, value)|
+           content = JSON.generate(value)
+
+           THREAD_POOL.schedule do
+             result_queue << write(filename, content)
+           end
+         end
+
+         Thread.pass until result_queue.length == hash.length
+         hash_with_filenames.each { |key, (filename, _value)| known_key_pairs[key] = filename }
+       end
+
+       def empty?
+         Dir.empty? @path
+       end
+
+       private
+
+       def write(filename, content)
+         tmp_filename = [filename, "tmp"].join(".")
+
+         File.binwrite(tmp_filename, content)
+
+         FileUtils.mv tmp_filename, filename
+         true
+       end
+
+       def read(key)
+         File.read(known_key_pairs[key]) if known_key_pairs.key? key
+       end
+
+       def filename_for_key(key)
+         File.join(
+           @path,
+           [
+             counter_prefix(key),
+             encode_key(key)
+           ].join("_")
+         ) + EXT
+       end
+
+       def counter_prefix(key)
+         count = keys.index(key) || counter.tap { @counter += 1 }
+
+         "%012d" % count
+       end
+
+       def counter
+         @counter ||= keys.length
+       end
+
+       def encode_key(key)
+         Base64.urlsafe_encode64(key).delete("=")
+       end
+
+       def decode_key(key)
+         encoded_key_part = File.basename(key).delete_suffix(EXT).split(/\A\d+_/).last
+         padding_count = (4 - encoded_key_part.length % 4) % 4
+
+         Base64.urlsafe_decode64(encoded_key_part + ("=" * padding_count))
+       end
+
+       def known_key_pairs
+         @known_key_pairs ||= Dir.entries(@path).sort.map do |filename|
+           next if filename.start_with? "."
+           next unless filename.end_with? EXT
+
+           file_path = File.join(@path, filename)
+           [decode_key(filename), file_path]
+         end.compact.to_h
+       end
+     end
+   end
+ end
data/lib/specwrk/store.rb ADDED
@@ -0,0 +1,241 @@
+ # frozen_string_literal: true
+
+ require "time"
+ require "json"
+
+ require "specwrk/store/file_adapter"
+
+ module Specwrk
+   class Store
+     MUTEXES = {}
+     MUTEXES_MUTEX = Mutex.new # 🐢🐢🐢🐢
+
+     class << self
+       def mutex_for(path)
+         MUTEXES_MUTEX.synchronize do
+           MUTEXES[path] ||= Mutex.new
+         end
+       end
+     end
+
+     def initialize(path, thread_safe_reads: true)
+       @path = path
+       @thread_safe_reads = thread_safe_reads
+     end
+
+     def [](key)
+       sync(thread_safe: thread_safe_reads) { adapter[key.to_s] }
+     end
+
+     def multi_read(*keys)
+       sync(thread_safe: thread_safe_reads) { adapter.multi_read(*keys) }
+     end
+
+     def []=(key, value)
+       sync do
+         adapter[key.to_s] = value
+       end
+     end
+
+     def keys
+       all_keys = sync(thread_safe: thread_safe_reads) do
+         adapter.keys
+       end
+
+       all_keys.reject { |k| k.start_with? "____" }
+     end
+
+     def length
+       keys.length
+     end
+
+     def any?
+       !empty?
+     end
+
+     def empty?
+       sync(thread_safe: thread_safe_reads) do
+         adapter.empty?
+       end
+     end
+
+     def delete(*keys)
+       sync { adapter.delete(*keys) }
+     end
+
+     def merge!(h2)
+       h2.transform_keys!(&:to_s)
+       sync { adapter.merge!(h2) }
+     end
+
+     def clear
+       sync { adapter.clear }
+     end
+
+     def to_h
+       sync(thread_safe: thread_safe_reads) do
+         adapter.multi_read(*keys).transform_keys!(&:to_sym)
+       end
+     end
+
+     def inspect
+       reload.to_h.dup
+     end
+
+     # Bypass any cached values. Helpful when you have two instances
+     # of the same store where one mutates data and the other needs to check
+     # on the status of that data (i.e. endpoint tests)
+     def reload
+       @adapter = nil
+       self
+     end
+
+     private
+
+     attr_reader :thread_safe_reads
+
+     def sync(thread_safe: true)
+       if !thread_safe || mutex.owned?
+         yield
+       else
+         mutex.synchronize { yield }
+       end
+     end
+
+     def adapter
+       @adapter ||= FileAdapter.new(@path)
+     end
+
+     def mutex
+       @mutex ||= self.class.mutex_for(@path)
+     end
+   end
+
+   class PendingStore < Store
+     RUN_TIME_BUCKET_MAXIMUM_KEY = :____run_time_bucket_maximum
+
+     def run_time_bucket_maximum=(val)
+       @run_time_bucket_maximum = self[RUN_TIME_BUCKET_MAXIMUM_KEY] = val
+     end
+
+     def run_time_bucket_maximum
+       @run_time_bucket_maximum ||= self[RUN_TIME_BUCKET_MAXIMUM_KEY]
+     end
+
+     def shift_bucket
+       sync do
+         return bucket_by_file unless run_time_bucket_maximum&.positive?
+
+         case ENV["SPECWRK_SRV_GROUP_BY"]
+         when "file"
+           bucket_by_file
+         else
+           bucket_by_timings
+         end
+       end
+     end
+
+     private
+
+     # Take elements from the hash where the file_path is the same
+     # Expects that the examples were merged in order of filename
+     def bucket_by_file
+       bucket = []
+       consumed_keys = []
+
+       all_keys = keys
+       key = all_keys.first
+       return [] if key.nil?
+
+       file_path = self[key][:file_path]
+
+       catch(:full) do
+         all_keys.each_slice(24).each do |key_group|
+           examples = multi_read(*key_group)
+
+           examples.each do |key, example|
+             throw :full unless example[:file_path] == file_path
+
+             bucket << example
+             consumed_keys << key
+           end
+         end
+       end
+
+       delete(*consumed_keys)
+       bucket
+     end
+
+     # Take elements from the hash until the average runtime bucket has filled
+     def bucket_by_timings
+       bucket = []
+       consumed_keys = []
+
+       estimated_run_time_total = 0
+
+       catch(:full) do
+         keys.each_slice(25).each do |key_group|
+           examples = multi_read(*key_group)
+
+           examples.each do |key, example|
+             estimated_run_time_total += example[:expected_run_time] || run_time_bucket_maximum
+             throw :full if estimated_run_time_total > run_time_bucket_maximum && bucket.length.positive?
+
+             bucket << example
+             consumed_keys << key
+           end
+         end
+       end
+
+       delete(*consumed_keys)
+       bucket
+     end
+   end
+
+   class CompletedStore < Store
+     def dump
+       @run_times = []
+       @first_started_at = Time.new(2999, 1, 1, 0, 0, 0) # TODO: Make future proof /s
+       @last_finished_at = Time.new(1900, 1, 1, 0, 0, 0)
+
+       @output = {
+         file_totals: Hash.new { |h, filename| h[filename] = 0.0 },
+         meta: {failures: 0, passes: 0, pending: 0},
+         examples: {}
+       }
+
+       to_h.values.each { |example| calculate(example) }
+
+       @output[:meta][:total_run_time] = @run_times.sum
+       @output[:meta][:average_run_time] = @output[:meta][:total_run_time] / [@run_times.length, 1].max.to_f
+       @output[:meta][:first_started_at] = @first_started_at.iso8601(6)
+       @output[:meta][:last_finished_at] = @last_finished_at.iso8601(6)
+
+       @output
+     end
+
+     private
+
+     def calculate(example)
+       @run_times << example[:run_time]
+       @output[:file_totals][example[:file_path]] += example[:run_time]
+
+       started_at = Time.parse(example[:started_at])
+       finished_at = Time.parse(example[:finished_at])
+
+       @first_started_at = started_at if started_at < @first_started_at
+       @last_finished_at = finished_at if finished_at > @last_finished_at
+
+       case example[:status]
+       when "passed"
+         @output[:meta][:passes] += 1
+       when "failed"
+         @output[:meta][:failures] += 1
+       when "pending"
+         @output[:meta][:pending] += 1
+       end
+
+       @output[:examples][example[:id]] = example
+     end
+   end
+ end
data/lib/specwrk/version.rb CHANGED
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module Specwrk
-   VERSION = "0.7.1"
+   VERSION = "0.9.1"
  end
@@ -2,41 +2,38 @@

  require "json"

+ require "specwrk/store"
+
  module Specwrk
    class Web
      module Endpoints
        class Base
-         attr_reader :pending_queue, :processing_queue, :completed_queue, :workers, :started_at
+         attr_reader :started_at

          def initialize(request)
            @request = request
          end

          def response
+           before_lock
+
            return with_response unless run_id # No run_id, no datastore usage in the endpoint

-           datastore.with_lock do |db|
-             @started_at = if db[:started_at]
-               Time.parse(db[:started_at])
-             else
-               db[:started_at] = Time.now
-             end
+           payload # parse the payload before any locking

-             @pending_queue = PendingQueue.new.merge!(db[:pending] || {})
-             @processing_queue = Queue.new.merge!(db[:processing] || {})
-             @completed_queue = CompletedQueue.new.merge!(db[:completed] || {})
-             @workers = db[:workers] ||= {}
+           worker[:first_seen_at] ||= Time.now.iso8601
+           worker[:last_seen_at] = Time.now.iso8601

-             worker[:first_seen_at] ||= Time.now
-             worker[:last_seen_at] = Time.now
+           final_response = with_lock do
+             started_at = metadata[:started_at] ||= Time.now.iso8601
+             @started_at = Time.parse(started_at)

-             with_response.tap do
-               db[:pending] = pending_queue.to_h
-               db[:processing] = processing_queue.to_h
-               db[:completed] = completed_queue.to_h
-               db[:workers] = workers.to_h
-             end
+             with_response
            end
+
+           after_lock
+
+           final_response
          end

          def with_response
@@ -47,6 +44,12 @@ module Specwrk

          attr_reader :request

+         def before_lock
+         end
+
+         def after_lock
+         end
+
          def not_found
            [404, {"Content-Type" => "text/plain"}, ["This is not the path you're looking for, 'ol chap..."]]
          end
@@ -56,6 +59,10 @@ module Specwrk
          end

          def payload
+           return unless request.content_type&.start_with?("application/json")
+           return unless request.post? || request.put? || request.delete?
+           return if body.empty?
+
            @payload ||= JSON.parse(body, symbolize_names: true)
          end

@@ -63,8 +70,28 @@ module Specwrk
            @body ||= request.body.read
          end

+         def pending
+           @pending ||= PendingStore.new(File.join(datastore_path, "pending"))
+         end
+
+         def processing
+           @processing ||= Store.new(File.join(datastore_path, "processing"))
+         end
+
+         def completed
+           @completed ||= CompletedStore.new(File.join(datastore_path, "completed"))
+         end
+
+         def metadata
+           @metadata ||= Store.new(File.join(datastore_path, "metadata"), thread_safe_reads: false)
+         end
+
+         def run_times
+           @run_times ||= Store.new(File.join(ENV["SPECWRK_OUT"], "run_times"), thread_safe_reads: false)
+         end
+
          def worker
-           workers[request.get_header("HTTP_X_SPECWRK_ID")] ||= {}
+           @worker ||= Store.new(File.join(datastore_path, "workers", request.get_header("HTTP_X_SPECWRK_ID").to_s))
          end

          def run_id
@@ -72,11 +99,24 @@ module Specwrk
          end

          def run_report_file_path
-           @run_report_file_path ||= File.join(ENV["SPECWRK_OUT"], run_id, "#{started_at.strftime("%Y%m%dT%H%M%S")}-report.json").to_s
+           @run_report_file_path ||= File.join(datastore_path, "#{started_at.strftime("%Y%m%dT%H%M%S")}-report.json").to_s
          end

-         def datastore
-           Web.datastore[File.join(ENV["SPECWRK_OUT"], run_id, "queues.json").to_s]
+         def datastore_path
+           @datastore_path ||= File.join(ENV["SPECWRK_OUT"], run_id).to_s.tap do |path|
+             FileUtils.mkdir_p(path) unless File.directory?(path)
+           end
+         end
+
+         def with_lock
+           Thread.pass until lock_file.flock(File::LOCK_EX)
+           yield
+         ensure
+           lock_file.flock(File::LOCK_UN)
+         end
+
+         def lock_file
+           @lock_file ||= File.open(File.join(datastore_path, "lock"), "a")
          end
        end

@@ -96,45 +136,109 @@ module Specwrk
        end

        class Seed < Base
+         def before_lock
+           examples_with_run_times if persist_seeds?
+         end
+
          def with_response
-           if ENV["SPECWRK_SRV_SINGLE_SEED_PER_RUN"].nil? || pending_queue.length.zero?
-             examples = payload.map { |hash| [hash[:id], hash] }.to_h
-             pending_queue.merge_with_previous_run_times!(examples)
+           if persist_seeds?
+             new_run_time_bucket_maximums = [pending.run_time_bucket_maximum, @seeds_run_time_bucket_maximum.to_f].compact
+             pending.run_time_bucket_maximum = new_run_time_bucket_maximums.sum.to_f / new_run_time_bucket_maximums.length.to_f
+
+             pending.merge!(examples_with_run_times)
            end

+           processing.clear
+           completed.clear
+
            ok
          end
-       end

-       class Complete < Base
-         def with_response
-           payload.each do |example|
-             next unless processing_queue.delete(example[:id].to_sym)
-             completed_queue[example[:id].to_sym] = example
+         def examples_with_run_times
+           @examples_with_run_times ||= begin
+             unsorted_examples_with_run_times = []
+             all_ids = payload.map { |example| example[:id] }
+             all_run_times = run_times.multi_read(*all_ids)
+
+             payload.each do |example|
+               run_time = all_run_times[example[:id]]
+
+               unsorted_examples_with_run_times << [example[:id], example.merge(expected_run_time: run_time)]
+             end
+
+             sorted_examples_with_run_times = if sort_by == :timings
+               unsorted_examples_with_run_times.sort_by do |entry|
+                 -(entry.last[:expected_run_time] || Float::INFINITY)
+               end
+             else
+               unsorted_examples_with_run_times.sort_by do |entry|
+                 entry.last[:file_path]
+               end
+             end
+
+             @seeds_run_time_bucket_maximum = run_time_bucket_maximum(all_run_times.values.compact)
+             @examples_with_run_times = sorted_examples_with_run_times.to_h
            end
+         end
+
+         private
+
+         # Average + standard deviation
+         def run_time_bucket_maximum(values)
+           return 0 if values.length.zero?
+
+           mean = values.sum.to_f / values.size
+           variance = values.map { |v| (v - mean)**2 }.sum / values.size
+           (mean + Math.sqrt(variance)).round(2)
+         end

-           if pending_queue.length.zero? && processing_queue.length.zero? && completed_queue.length.positive? && ENV["SPECWRK_OUT"]
-             completed_queue.dump_and_write(run_report_file_path)
-             FileUtils.ln_sf(run_report_file_path, File.join(ENV["SPECWRK_OUT"], "report.json"))
+         def persist_seeds?
+           ENV["SPECWRK_SRV_SINGLE_SEED_PER_RUN"].nil? || pending.empty?
+         end
+
+         def sort_by
+           if ENV["SPECWRK_SRV_GROUP_BY"] == "file" || run_times.empty?
+             :file
+           else
+             :timings
            end
+         end
+       end
+
+       class Complete < Base
+         def with_response
+           completed.merge!(completed_examples)
+           processing.delete(*completed_examples.keys)

            ok
          end
+
+         private
+
+         def completed_examples
+           @completed_data ||= payload.map { |example| [example[:id], example] if processing[example[:id]] }.compact.to_h
+         end
+
+         # We don't care about exact values here, just approximate run times are fine
+         # So if we overwrite run times from another process it is nbd
+         def after_lock
+           run_time_data = payload.map { |example| [example[:id], example[:run_time]] }.to_h
+           run_times.merge! run_time_data
+         end
        end

        class Pop < Base
          def with_response
-           @examples = pending_queue.shift_bucket
+           @examples = pending.shift_bucket

-           @examples.each do |example|
-             processing_queue[example[:id]] = example
-           end
+           processing_data = @examples.map { |example| [example[:id], example] }.to_h
+           processing.merge!(processing_data)

-           if @examples.length.positive?
+           if @examples.any?
              [200, {"Content-Type" => "application/json"}, [JSON.generate(@examples)]]
-           elsif pending_queue.length.zero? && processing_queue.length.zero? && completed_queue.length.zero?
+           elsif pending.empty? && processing.empty? && completed.empty?
              [204, {"Content-Type" => "text/plain"}, ["Waiting for sample to be seeded."]]
-           elsif completed_queue.length.positive? && processing_queue.length.zero?
+           elsif completed.any? && processing.empty?
              [410, {"Content-Type" => "text/plain"}, ["That's a good lad. Run along now and go home."]]
            else
              not_found
@@ -144,34 +248,15 @@ module Specwrk

        class Report < Base
          def with_response
-           if data
-             [200, {"Content-Type" => "application/json"}, [data]]
-           else
-             [404, {"Content-Type" => "text/plain"}, ["Unable to report on run #{run_id}; no file matching #{"*-report-#{run_id}.json"}"]]
-           end
-         end
-
-         private
-
-         def data
-           return @data if defined? @data
-
-           return unless most_recent_run_report_file
-           return unless File.exist?(most_recent_run_report_file)
-
-           @data = File.open(most_recent_run_report_file, "r") do |file|
-             file.flock(File::LOCK_SH)
-             file.read
-           end
-         end
-
-         def most_recent_run_report_file
-           @most_recent_run_report_file ||= Dir.glob(File.join(ENV["SPECWRK_OUT"], run_id, "*-report.json")).last
+           [200, {"Content-Type" => "application/json"}, [JSON.generate(completed.dump)]]
          end
        end

        class Shutdown < Base
          def with_response
+           pending.clear
+           processing.clear
+
            interupt! if ENV["SPECWRK_SRV_SINGLE_RUN"]

            [200, {"Content-Type" => "text/plain"}, ["✌️"]]
data/lib/specwrk/web.rb CHANGED
@@ -1,14 +1,6 @@
  # frozen_string_literal: true

- require "specwrk/queue"
- require "specwrk/filestore"
-
  module Specwrk
    class Web
-     class << self
-       def datastore
-         Filestore
-       end
-     end
    end
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: specwrk
  version: !ruby/object:Gem::Version
-   version: 0.7.1
+   version: 0.9.1
  platform: ruby
  authors:
  - Daniel Westendorf
@@ -9,6 +9,34 @@ bindir: exe
  cert_chain: []
  date: 1980-01-02 00:00:00.000000000 Z
  dependencies:
+ - !ruby/object:Gem::Dependency
+   name: json
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: base64
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
  - !ruby/object:Gem::Dependency
    name: dry-cli
    requirement: !ruby/object:Gem::Requirement
@@ -160,10 +188,10 @@ files:
  - lib/specwrk/cli.rb
  - lib/specwrk/cli_reporter.rb
  - lib/specwrk/client.rb
- - lib/specwrk/filestore.rb
  - lib/specwrk/hookable.rb
  - lib/specwrk/list_examples.rb
- - lib/specwrk/queue.rb
+ - lib/specwrk/store.rb
+ - lib/specwrk/store/file_adapter.rb
  - lib/specwrk/version.rb
  - lib/specwrk/web.rb
  - lib/specwrk/web/app.rb
@@ -196,7 +224,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
      - !ruby/object:Gem::Version
        version: '0'
  requirements: []
- rubygems_version: 3.6.7
+ rubygems_version: 3.6.9
  specification_version: 4
  summary: Parallel rspec test runner from a queue of pending jobs.
  test_files: []
data/lib/specwrk/filestore.rb DELETED
@@ -1,64 +0,0 @@
- # frozen_string_literal: true
-
- require "fileutils"
-
- module Specwrk
-   class Filestore
-     @mutexes = {}
-     @mutexes_mutex = Mutex.new # 🐢🐢🐢🐢
-
-     class << self
-       def [](path)
-         new(path)
-       end
-
-       def mutex_for(path)
-         @mutexes_mutex.synchronize do
-           @mutexes[path] ||= Mutex.new
-         end
-       end
-     end
-
-     def initialize(path)
-       @path = path
-       @tmpfile_path = @path + ".tmp"
-       @lock_path = @path + ".lock"
-
-       FileUtils.mkdir_p File.dirname(@path)
-       File.open(@path, "a") {} # multi-process and multi-thread safe touch
-     end
-
-     def with_lock
-       self.class.mutex_for(@path).synchronize do
-         lock_file.flock(File::LOCK_EX)
-
-         hash = read
-         result = yield(hash)
-
-         # Will truncate if already exists
-         File.open(@tmpfile_path, "w") do |tmpfile|
-           tmpfile.write(hash.to_json)
-           tmpfile.fsync
-           tmpfile.close
-         end
-
-         File.rename(@tmpfile_path, @path)
-         result
-       ensure
-         lock_file.flock(File::LOCK_UN)
-       end
-     end
-
-     private
-
-     def lock_file
-       @lock_file ||= File.open(@lock_path, "w")
-     end
-
-     def read
-       JSON.parse(File.read(@path), symbolize_names: true)
-     rescue JSON::ParserError
-       {}
-     end
-   end
- end
data/lib/specwrk/queue.rb DELETED
@@ -1,180 +0,0 @@
- # frozen_string_literal: true
-
- require "time"
- require "json"
-
- module Specwrk
-   Queue = Class.new(Hash)
-
-   class PendingQueue < Queue
-     def shift_bucket
-       return bucket_by_file unless previous_run_times
-
-       case ENV["SPECWRK_SRV_GROUP_BY"]
-       when "file"
-         bucket_by_file
-       else
-         bucket_by_timings
-       end
-     end
-
-     def run_time_bucket_threshold
-       return 1 unless previous_run_times
-
-       previous_run_times.dig(:meta, :average_run_time)
-     end
-
-     def previous_run_times
-       return unless ENV["SPECWRK_OUT"]
-
-       @previous_run_times ||= begin
-         return unless previous_run_times_file_path
-         return unless File.exist? previous_run_times_file_path
-
-         raw_data = File.open(previous_run_times_file_path, "r") do |file|
-           file.flock(File::LOCK_SH)
-           file.read
-         end
-
-         @previous_run_times = JSON.parse(raw_data, symbolize_names: true)
-       rescue JSON::ParserError => e
-         warn "#{e.inspect} in file #{previous_run_times_file_path}"
-         nil
-       end
-     end
-
-     def merge_with_previous_run_times!(h2)
-       h2.each { |_id, example| merge_example(example) }
-
-       # Sort by exepcted run time, slowest to fastest
-       new_h = sort_by { |_, example| example[:expected_run_time] }.reverse.to_h
-       clear
-       merge!(new_h)
-     end
-
-     private
-
-     # We want the most recently modified run time file
-     # report files are prefixed with a timestamp, and Dir.glob should order
-     # alphanumericly
-     def previous_run_times_file_path
-       return unless ENV["SPECWRK_OUT"]
-
-       @previous_run_times_file_path ||= Dir.glob(File.join(ENV["SPECWRK_OUT"], "*-report-*.json")).last
-     end
-
-     # Take elements from the hash where the file_path is the same
-     def bucket_by_file
-       bucket = []
-
-       key = keys.first
-       return [] if key.nil?
-
-       file_path = self[key][:file_path]
-       each do |id, example|
-         next unless example[:file_path] == file_path
-
-         bucket << example
-         delete id
-       end
-
-       bucket
-     end
-
-     # Take elements from the hash until the average runtime bucket has filled
-     def bucket_by_timings
-       bucket = []
-
-       estimated_run_time_total = 0
-
-       while estimated_run_time_total < run_time_bucket_threshold
-         key = keys.first
-         break if key.nil?
-
-         estimated_run_time_total += dig(key, :expected_run_time)
-         break if estimated_run_time_total > run_time_bucket_threshold && bucket.length.positive?
-
-         bucket << self[key]
-         delete key
-       end
-
-       bucket
-     end
-
-     def merge_example(example)
-       return if key? example[:id]
-       return if key? example[:file_path]
-
-       self[example[:id]] = if previous_run_times
-         example.merge!(
-           expected_run_time: previous_run_times.dig(:examples, example[:id].to_sym, :run_time) || 99999.9 # run "unknown" files first
-         )
-       else
-         example.merge!(
-           expected_run_time: 99999.9 # run "unknown" files first
-         )
-       end
-     end
-   end
-
-   class CompletedQueue < Queue
-     def dump_and_write(path)
-       write_output_to(path, dump)
-     end
-
-     def dump
-       @run_times = []
-       @first_started_at = Time.new(2999, 1, 1, 0, 0, 0) # TODO: Make future proof /s
-       @last_finished_at = Time.new(1900, 1, 1, 0, 0, 0)
-
-       @output = {
-         file_totals: Hash.new { |h, filename| h[filename] = 0.0 },
-         meta: {failures: 0, passes: 0, pending: 0},
-         examples: {}
-       }
-
-       values.each { |example| calculate(example) }
-
-       @output[:meta][:total_run_time] = @run_times.sum
-       @output[:meta][:average_run_time] = @output[:meta][:total_run_time] / [@run_times.length, 1].max.to_f
-       @output[:meta][:first_started_at] = @first_started_at.iso8601(6)
-       @output[:meta][:last_finished_at] = @last_finished_at.iso8601(6)
-
-       @output
-     end
-
-     private
-
-     def calculate(example)
-       @run_times << example[:run_time]
-       @output[:file_totals][example[:file_path]] += example[:run_time]
-
-       started_at = Time.parse(example[:started_at])
-       finished_at = Time.parse(example[:finished_at])
-
-       @first_started_at = started_at if started_at < @first_started_at
-       @last_finished_at = finished_at if finished_at > @last_finished_at
-
-       case example[:status]
-       when "passed"
-         @output[:meta][:passes] += 1
-       when "failed"
-         @output[:meta][:failures] += 1
-       when "pending"
-         @output[:meta][:pending] += 1
-       end
-
-       @output[:examples][example[:id]] = example
-     end
-
-     def write_output_to(path, output)
-       File.open(path, "w") do |file|
-         file.flock(File::LOCK_EX)
-
-         file.write JSON.pretty_generate(output)
-
-         file.flock(File::LOCK_UN)
-       end
-     end
-   end
- end