polyrun 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CODE_OF_CONDUCT.md +31 -0
- data/CONTRIBUTING.md +84 -0
- data/LICENSE +21 -0
- data/README.md +140 -0
- data/SECURITY.md +27 -0
- data/bin/polyrun +6 -0
- data/docs/SETUP_PROFILE.md +106 -0
- data/lib/polyrun/cli/coverage_commands.rb +150 -0
- data/lib/polyrun/cli/coverage_merge_io.rb +124 -0
- data/lib/polyrun/cli/database_commands.rb +149 -0
- data/lib/polyrun/cli/env_commands.rb +43 -0
- data/lib/polyrun/cli/helpers.rb +113 -0
- data/lib/polyrun/cli/init_command.rb +99 -0
- data/lib/polyrun/cli/plan_command.rb +134 -0
- data/lib/polyrun/cli/prepare_command.rb +71 -0
- data/lib/polyrun/cli/prepare_recipe.rb +77 -0
- data/lib/polyrun/cli/queue_command.rb +101 -0
- data/lib/polyrun/cli/quick_command.rb +13 -0
- data/lib/polyrun/cli/report_commands.rb +94 -0
- data/lib/polyrun/cli/run_shards_command.rb +88 -0
- data/lib/polyrun/cli/run_shards_plan_boot_phases.rb +91 -0
- data/lib/polyrun/cli/run_shards_plan_options.rb +45 -0
- data/lib/polyrun/cli/run_shards_planning.rb +124 -0
- data/lib/polyrun/cli/run_shards_run.rb +168 -0
- data/lib/polyrun/cli/start_bootstrap.rb +99 -0
- data/lib/polyrun/cli/timing_command.rb +31 -0
- data/lib/polyrun/cli.rb +184 -0
- data/lib/polyrun/config.rb +61 -0
- data/lib/polyrun/coverage/cobertura_zero_lines.rb +32 -0
- data/lib/polyrun/coverage/collector.rb +184 -0
- data/lib/polyrun/coverage/collector_finish.rb +95 -0
- data/lib/polyrun/coverage/filter.rb +22 -0
- data/lib/polyrun/coverage/formatter.rb +115 -0
- data/lib/polyrun/coverage/merge/formatters.rb +181 -0
- data/lib/polyrun/coverage/merge/formatters_html.rb +55 -0
- data/lib/polyrun/coverage/merge.rb +127 -0
- data/lib/polyrun/coverage/merge_fragment_meta.rb +47 -0
- data/lib/polyrun/coverage/merge_merge_two.rb +117 -0
- data/lib/polyrun/coverage/rails.rb +128 -0
- data/lib/polyrun/coverage/reporting.rb +41 -0
- data/lib/polyrun/coverage/result.rb +18 -0
- data/lib/polyrun/coverage/track_files.rb +141 -0
- data/lib/polyrun/data/cached_fixtures.rb +122 -0
- data/lib/polyrun/data/factory_counts.rb +35 -0
- data/lib/polyrun/data/factory_instrumentation.rb +50 -0
- data/lib/polyrun/data/fixtures.rb +68 -0
- data/lib/polyrun/data/parallel_provisioning.rb +93 -0
- data/lib/polyrun/data/snapshot.rb +84 -0
- data/lib/polyrun/database/clone_shards.rb +81 -0
- data/lib/polyrun/database/provision.rb +72 -0
- data/lib/polyrun/database/shard.rb +63 -0
- data/lib/polyrun/database/url_builder/connection/infer.rb +49 -0
- data/lib/polyrun/database/url_builder/connection/url_builders.rb +43 -0
- data/lib/polyrun/database/url_builder/connection.rb +191 -0
- data/lib/polyrun/database/url_builder/template_prepare.rb +21 -0
- data/lib/polyrun/database/url_builder.rb +160 -0
- data/lib/polyrun/debug.rb +81 -0
- data/lib/polyrun/env/ci.rb +65 -0
- data/lib/polyrun/log.rb +70 -0
- data/lib/polyrun/minitest.rb +17 -0
- data/lib/polyrun/partition/constraints.rb +69 -0
- data/lib/polyrun/partition/hrw.rb +33 -0
- data/lib/polyrun/partition/min_heap.rb +64 -0
- data/lib/polyrun/partition/paths.rb +28 -0
- data/lib/polyrun/partition/paths_build.rb +128 -0
- data/lib/polyrun/partition/plan.rb +189 -0
- data/lib/polyrun/partition/plan_lpt.rb +49 -0
- data/lib/polyrun/partition/plan_sharding.rb +48 -0
- data/lib/polyrun/partition/stable_shuffle.rb +18 -0
- data/lib/polyrun/prepare/artifacts.rb +40 -0
- data/lib/polyrun/prepare/assets.rb +57 -0
- data/lib/polyrun/queue/file_store.rb +199 -0
- data/lib/polyrun/queue/file_store_pending.rb +48 -0
- data/lib/polyrun/quick/assertions.rb +32 -0
- data/lib/polyrun/quick/errors.rb +6 -0
- data/lib/polyrun/quick/example_group.rb +66 -0
- data/lib/polyrun/quick/example_runner.rb +93 -0
- data/lib/polyrun/quick/matchers.rb +156 -0
- data/lib/polyrun/quick/reporter.rb +42 -0
- data/lib/polyrun/quick/runner.rb +180 -0
- data/lib/polyrun/quick.rb +1 -0
- data/lib/polyrun/railtie.rb +7 -0
- data/lib/polyrun/reporting/junit.rb +125 -0
- data/lib/polyrun/reporting/junit_emit.rb +58 -0
- data/lib/polyrun/reporting/rspec_junit.rb +39 -0
- data/lib/polyrun/rspec.rb +15 -0
- data/lib/polyrun/templates/POLYRUN.md +45 -0
- data/lib/polyrun/templates/ci_matrix.polyrun.yml +14 -0
- data/lib/polyrun/templates/minimal_gem.polyrun.yml +13 -0
- data/lib/polyrun/templates/rails_prepare.polyrun.yml +31 -0
- data/lib/polyrun/timing/merge.rb +35 -0
- data/lib/polyrun/timing/summary.rb +25 -0
- data/lib/polyrun/version.rb +3 -0
- data/lib/polyrun.rb +58 -0
- data/polyrun.gemspec +37 -0
- data/sig/polyrun/cli.rbs +6 -0
- data/sig/polyrun/config.rbs +20 -0
- data/sig/polyrun/debug.rbs +12 -0
- data/sig/polyrun/log.rbs +12 -0
- data/sig/polyrun/minitest.rbs +5 -0
- data/sig/polyrun/quick.rbs +19 -0
- data/sig/polyrun/rspec.rbs +5 -0
- data/sig/polyrun.rbs +11 -0
- metadata +288 -0
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
require "digest/md5"
|
|
2
|
+
require "fileutils"
|
|
3
|
+
require "open3"
|
|
4
|
+
|
|
5
|
+
module Polyrun
  module Prepare
    # Asset digest and optional Rails +assets:precompile+, stdlib only.
    module Assets
      module_function

      # Stable digest of a list of files (sorted). Directories are expanded to
      # all files recursively; nil entries and non-existent paths are skipped.
      # Returns an MD5 hex string over sorted "path:content-md5" pairs, so both
      # content changes and file-set changes alter the digest.
      def digest_sources(*paths)
        files = []
        paths.flatten.each do |p|
          # Skip nil/false entries so callers can pass optional paths directly.
          next unless p

          path = p.to_s
          if File.directory?(path)
            # FNM_DOTMATCH so dotfiles under the directory are digested too.
            Dir.glob(File.join(path, "**", "*"), File::FNM_DOTMATCH).each do |f|
              files << f if File.file?(f)
            end
          elsif File.file?(path)
            files << path
          end
        end
        files.sort!
        combined = files.map { |f| "#{f}:#{Digest::MD5.file(f).hexdigest}" }.join("|")
        Digest::MD5.hexdigest(combined)
      end

      # True when +marker_path+ is missing or its stored digest differs from the
      # current digest of +digest_paths+ (caller runs compile when stale).
      def stale?(marker_path, *digest_paths)
        return true unless File.file?(marker_path)

        File.read(marker_path).strip != digest_sources(*digest_paths)
      end

      # Records the current digest of +digest_paths+ at +marker_path+, creating
      # parent directories as needed. Pair with +stale?+ to skip recompiles.
      def write_marker!(marker_path, *digest_paths)
        FileUtils.mkdir_p(File.dirname(marker_path))
        File.write(marker_path, digest_sources(*digest_paths))
      end

      # Shells out to +bin/rails assets:precompile+ when +rails_root+ contains +bin/rails+.
      # Raises Polyrun::Error when the binstub is absent or the task fails;
      # returns true on success. Stderr is surfaced via Polyrun::Log unless +silent+.
      def precompile!(rails_root:, silent: true)
        exe = File.join(rails_root, "bin", "rails")
        raise Polyrun::Error, "Prepare::Assets: no #{exe}" unless File.executable?(exe)

        cmd = [exe, "assets:precompile"]
        _out, err, st = Open3.capture3(*cmd, chdir: rails_root)
        Polyrun::Log.warn err if !silent && !err.empty?
        raise Polyrun::Error, "assets:precompile failed: #{err}" unless st.success?

        true
      end
    end
  end
end
|
|
@@ -0,0 +1,199 @@
|
|
|
1
|
+
require "fileutils"
|
|
2
|
+
require "json"
|
|
3
|
+
require "securerandom"
|
|
4
|
+
require "time"
|
|
5
|
+
module Polyrun
  module Queue
    # File-backed queue (spec_queue.md): +queue.json+, +pending/*.json+ chunks, +done.jsonl+, +leases.json+ (OS flock).
    #
    # On-disk layout under +root+:
    #   queue.json   - metadata (pending_count / done_count / chunk_size)
    #   pending/     - numbered JSON-array chunks of not-yet-claimed items
    #   leases.json  - lease_id => {worker_id, paths, claimed_at}
    #   done.jsonl   - one JSON string per completed path, append-only
    #   ledger.jsonl - append-only audit log of CLAIM/ACK events
    #   lock         - flock target serializing all mutations
    #
    # All mutating operations run under an exclusive flock (see +with_lock+);
    # the write ordering inside each operation matters for crash consistency.
    class FileStore
      # Max items per pending/*.json chunk file.
      CHUNK_SIZE = 500

      attr_reader :root

      def initialize(root)
        @root = File.expand_path(root)
      end

      # Creates a fresh queue populated with +items+ (stringified).
      # Raises if a queue.json already exists at this root.
      def init!(items)
        FileUtils.mkdir_p(@root)
        raise Polyrun::Error, "queue already exists: #{queue_path}" if File.file?(queue_path)

        items = items.map(&:to_s)
        meta = base_meta(items.size)
        FileUtils.mkdir_p(pending_dir)
        # Chunks are written before queue.json so a crash mid-init leaves no
        # queue.json and init! can simply be retried.
        write_pending_chunks!(items, meta)
        atomic_write(queue_path, JSON.generate(meta))
        atomic_write(ledger_path, "")
        true
      end

      # Atomically claims up to +batch_size+ paths for +worker_id+.
      # Returns {"lease_id" => uuid, "paths" => [...]}; paths may be empty
      # when the queue is drained. The lease must later be released via +ack!+.
      def claim!(worker_id:, batch_size:)
        batch_size = Integer(batch_size)
        raise Polyrun::Error, "batch_size must be >= 1" if batch_size < 1

        lease_id = SecureRandom.uuid
        batch = []
        with_lock do
          meta = load_meta!
          # take_pending_batch! (file_store_pending.rb) pops from chunk files
          # and decrements meta["pending_count"].
          batch = take_pending_batch!(meta, batch_size)
          leases = read_leases
          leases[lease_id] = {
            "worker_id" => worker_id.to_s,
            "paths" => batch,
            "claimed_at" => Time.now.utc.iso8601
          }
          write_meta!(meta)
          write_leases!(leases)
          append_ledger(
            "CLAIM" => lease_id,
            "worker_id" => worker_id.to_s,
            "paths" => batch,
            "pending_remaining" => meta["pending_count"]
          )
        end
        {"lease_id" => lease_id, "paths" => batch}
      end

      # Marks a lease's paths as done and releases the lease.
      # Raises on an unknown lease_id or when +worker_id+ does not match the
      # worker that claimed it.
      def ack!(lease_id:, worker_id:)
        with_lock do
          leases = read_leases
          lease = leases[lease_id]
          raise Polyrun::Error, "unknown lease: #{lease_id}" unless lease

          if lease["worker_id"].to_s != worker_id.to_s
            raise Polyrun::Error, "lease worker mismatch"
          end

          leases.delete(lease_id)
          write_leases!(leases)

          paths = lease["paths"] || []
          meta = load_meta!
          meta["done_count"] = Integer(meta["done_count"]) + paths.size
          append_done_lines!(paths)
          write_meta!(meta)
          append_ledger("ACK" => lease_id, "worker_id" => worker_id.to_s, "paths" => paths)
        end
        true
      end

      # Snapshot of queue progress: {"pending" =>, "done" =>, "leases" =>}.
      def status
        with_lock do
          meta = load_meta!
          {
            "pending" => Integer(meta["pending_count"]),
            "done" => Integer(meta["done_count"]),
            "leases" => read_leases.keys.size
          }
        end
      end

      private

      def queue_path
        File.join(@root, "queue.json")
      end

      def leases_path
        File.join(@root, "leases.json")
      end

      def ledger_path
        File.join(@root, "ledger.jsonl")
      end

      def lock_path
        File.join(@root, "lock")
      end

      def pending_dir
        File.join(@root, "pending")
      end

      def done_path
        File.join(@root, "done.jsonl")
      end

      # Serializes all queue mutations across processes via an exclusive
      # advisory flock on a dedicated lock file. The lock is released when
      # the File.open block closes the descriptor.
      def with_lock
        FileUtils.mkdir_p(@root)
        File.open(lock_path, File::CREAT | File::RDWR) do |f|
          f.flock(File::LOCK_EX)
          yield
        end
      end

      # Initial queue.json contents for a queue of +pending_count+ items.
      def base_meta(pending_count)
        {
          "created_at" => Time.now.utc.iso8601,
          "pending_count" => pending_count,
          "done_count" => 0,
          "chunk_size" => CHUNK_SIZE
        }
      end

      # Chunk size recorded at init time, falling back to the class default.
      def meta_chunk_size(meta)
        (meta["chunk_size"] || CHUNK_SIZE).to_i
      end

      # Loads and validates queue.json; raises if missing or malformed.
      def load_meta!
        p = queue_path
        raise Polyrun::Error, "queue not initialized; run queue init" unless File.file?(p)

        data = JSON.parse(File.read(p))
        raise Polyrun::Error, "invalid queue.json: #{p}" unless meta_ok?(data)

        data
      end

      # Shallow schema check for queue.json contents.
      def meta_ok?(data)
        data.is_a?(Hash) &&
          data.key?("pending_count") &&
          data.key?("done_count") &&
          data.key?("chunk_size")
      end

      # Splits +items+ into zero-padded, lexicographically ordered chunk files
      # (000001.json, 000002.json, ...) so consumption order is stable.
      def write_pending_chunks!(items, meta)
        chunk_size = meta_chunk_size(meta)
        FileUtils.mkdir_p(pending_dir)
        items.each_slice(chunk_size).with_index(1) do |slice, idx|
          atomic_write(File.join(pending_dir, format("%06d.json", idx)), JSON.generate(slice))
        end
      end

      def write_meta!(meta)
        atomic_write(queue_path, JSON.generate(meta))
      end

      # Appends each completed path to done.jsonl, one JSON string per line.
      def append_done_lines!(paths)
        return if paths.empty?

        File.open(done_path, "a") do |io|
          paths.each { |p| io.puts(JSON.generate(p.to_s)) }
        end
      end

      # Current leases; an absent leases.json means no outstanding leases.
      def read_leases
        return {} unless File.file?(leases_path)

        JSON.parse(File.read(leases_path))
      end

      def write_leases!(h)
        atomic_write(leases_path, JSON.generate(h))
      end

      # Appends an audit record (with a UTC timestamp) to ledger.jsonl.
      def append_ledger(entry)
        line = JSON.generate(entry.merge("at" => Time.now.utc.iso8601)) + "\n"
        File.open(ledger_path, "a") { |f| f.write(line) }
      end

      # Write-then-rename so readers never observe a partially written file.
      # NOTE(review): no fsync before rename — assumed acceptable for this
      # queue's durability needs; confirm if power-loss safety is required.
      def atomic_write(path, body)
        tmp = "#{path}.tmp.#{$$}"
        File.write(tmp, body)
        File.rename(tmp, path)
      end
    end
  end
end
|
|
198
|
+
|
|
199
|
+
require_relative "file_store_pending"
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
module Polyrun
  module Queue
    # Reopens FileStore with the pending-chunk consumption logic.
    # All methods here assume the caller already holds the queue flock
    # (they are invoked from inside FileStore#claim!'s with_lock block).
    class FileStore
      private

      # Pending chunk files in consumption order; zero-padded six-digit names
      # make lexicographic sort equal numeric sort.
      def sorted_chunk_files
        Dir.glob(File.join(pending_dir, "[0-9][0-9][0-9][0-9][0-9][0-9].json")).sort
      end

      # Pops up to +batch_size+ items from the head chunk files, updating
      # meta["pending_count"] in place (caller persists +meta+ afterwards).
      # Raises when the count claims pending work but no chunk files remain.
      def take_pending_batch!(meta, batch_size)
        remaining = Integer(meta["pending_count"])
        return [] if remaining <= 0 || batch_size <= 0

        batch = []
        files = sorted_chunk_files
        while batch.size < batch_size
          break if files.empty?

          head = files.first
          append_from_next_chunk!(batch, batch_size, head)
          # Only advance past the head chunk once it has been fully drained —
          # append_from_next_chunk! deletes the file when it empties it,
          # otherwise the rewritten chunk still holds items for the next pass.
          files.shift unless File.file?(head)
        end

        meta["pending_count"] = [remaining - batch.size, 0].max
        if meta["pending_count"].positive? && sorted_chunk_files.empty?
          raise Polyrun::Error,
            "queue corrupt: pending_count=#{meta["pending_count"]} but no pending chunk files under #{pending_dir}"
        end

        batch
      end

      # Moves up to (batch_size - batch.size) items from the chunk at +path+
      # into +batch+, then atomically rewrites the shrunken chunk or deletes
      # the file when it is exhausted.
      def append_from_next_chunk!(batch, batch_size, path)
        chunk = JSON.parse(File.read(path))
        raise Polyrun::Error, "corrupt queue chunk: #{path}" unless chunk.is_a?(Array)

        need = batch_size - batch.size
        taken = chunk.shift(need)
        batch.concat(taken)
        if chunk.empty?
          FileUtils.rm_f(path)
        else
          atomic_write(path, JSON.generate(chunk))
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
require_relative "errors"
|
|
2
|
+
|
|
3
|
+
module Polyrun
  module Quick
    # Minitest-flavoured assertions mixed into example runners.
    # Every failure raises AssertionFailed (defined in errors.rb), which the
    # runner reports as a failure rather than an error.
    module Assertions
      # Fails unless +condition+ is truthy.
      def assert(condition, message = "assertion failed")
        raise AssertionFailed, message unless condition
      end

      # Fails unless +expected == actual+.
      def assert_equal(expected, actual, message = nil)
        return if expected == actual

        raise AssertionFailed,
          message || "expected #{expected.inspect}, got #{actual.inspect}"
      end

      # Fails unless +obj+ is nil.
      def assert_nil(obj, message = nil)
        return if obj.nil?

        raise AssertionFailed, message || "expected nil, got #{obj.inspect}"
      end

      # Fails unless the block raises +exception_class+ (or a subclass).
      # Returns the captured exception so callers can make further assertions
      # on its message (Minitest convention). Exceptions of any other class
      # propagate unchanged.
      def assert_raises(exception_class = StandardError)
        yield
      rescue exception_class => e
        e
      else
        raise AssertionFailed, "expected #{exception_class} to be raised"
      end
    end
  end
end
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
module Polyrun
  module Quick
    # One +describe+ block (possibly nested). Holds +it+ / +test+ examples and hooks.
    class ExampleGroup
      attr_reader :name, :parent, :children, :examples, :before_hooks, :after_hooks, :lets, :let_bang_order

      def initialize(name, parent: nil)
        @name = name.to_s
        @parent = parent
        @children = []
        @examples = []
        @before_hooks = []
        @after_hooks = []
        @lets = {}
        @let_bang_order = []
      end

      # Space-joined names from the root group down to this one.
      def full_name
        parent.nil? ? @name : "#{parent.full_name} #{@name}".strip
      end

      # Declares a nested group, evaluating +block+ in its context.
      def describe(name, &block)
        nested = ExampleGroup.new(name, parent: self)
        @children.push(nested)
        nested.instance_eval(&block) if block
        nested
      end

      # Registers an example under this group.
      def it(description, &block)
        @examples.push([description.to_s, block])
      end

      alias_method :test, :it

      # Hook run before each example in this group (and its descendants).
      def before(&block)
        @before_hooks.push(block)
      end

      # Hook run after each example in this group (and its descendants).
      def after(&block)
        @after_hooks.push(block)
      end

      # Declares a lazily-evaluated, memoized helper.
      def let(name, &block)
        @lets.store(name.to_sym, block)
      end

      # Like +let+, but also eagerly invoked before every example.
      def let!(name, &block)
        key = name.to_sym
        let(key, &block)
        @let_bang_order.push(key)
      end

      # Depth-first walk: yields (ancestor chain incl. self, description, block)
      # for every example in this subtree, in declaration order.
      def each_example_with_ancestors(ancestors = [], &visitor)
        lineage = ancestors + [self]
        @examples.each { |desc, block| visitor.call(lineage, desc, block) }
        @children.each { |nested| nested.each_example_with_ancestors(lineage, &visitor) }
      end
    end
  end
end
|
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
require_relative "assertions"
|
|
2
|
+
require_relative "errors"
|
|
3
|
+
require_relative "matchers"
|
|
4
|
+
|
|
5
|
+
module Polyrun
  module Quick
    # Per-example execution: merged lets, hooks, assertions, optional Capybara::DSL.
    #
    # A fresh (or reused) runner executes one example at a time: lets from the
    # whole ancestor chain are flattened onto the runner as singleton methods,
    # before/after hooks run in declaration/reverse order, and the example
    # block is instance_eval'd so it sees lets, Assertions and Matchers.
    class ExampleRunner
      include Assertions
      include Matchers

      def initialize(reporter)
        @reporter = reporter
        # Memoization cache for let values; reset around every example.
        @_let_cache = {}
      end

      # Runs a single example and reports pass/fail/error to @reporter.
      # +ancestor_chain+ is the ExampleGroup lineage (outermost first).
      # Never raises for example outcomes: AssertionFailed is reported as a
      # failure, any other StandardError as an error.
      def run(group_name:, description:, ancestor_chain:, block:)
        @_let_cache = {}
        merge_lets_from_chain(ancestor_chain)
        define_let_methods!
        run_let_bangs_from_chain
        extend_capybara_if_enabled!
        begin
          run_before_hooks_from_chain(ancestor_chain)
          instance_eval(&block)
          @reporter.pass(group_name, description)
        rescue AssertionFailed => e
          @reporter.fail(group_name, description, e)
        rescue => e
          @reporter.error(group_name, description, e)
        ensure
          # After hooks and Capybara reset run even when the example failed;
          # the let cache is cleared last so after hooks can still use lets.
          run_after_hooks_from_chain(ancestor_chain)
          reset_capybara_if_enabled!
          @_let_cache = {}
        end
      end

      private

      # Flattens lets and let! ordering from the ancestor chain; inner groups
      # override outer lets of the same name (merge! in chain order).
      def merge_lets_from_chain(ancestor_chain)
        @merged_lets = {}
        ancestor_chain.each do |g|
          @merged_lets.merge!(g.lets)
        end
        @let_bang_order = []
        ancestor_chain.each do |g|
          @let_bang_order.concat(g.let_bang_order)
        end
      end

      # Defines each merged let as a memoized singleton method on this runner.
      def define_let_methods!
        @merged_lets.each do |sym, proc|
          define_singleton_method(sym) do
            @_let_cache[sym] ||= instance_eval(&proc) # rubocop:disable ThreadSafety/ClassInstanceVariable -- per-example runner memo
          end
        end
      end

      # Eagerly evaluates every let! in outermost-to-innermost order.
      def run_let_bangs_from_chain
        @let_bang_order.each { |sym| public_send(sym) }
      end

      # Before hooks: outermost group first, declaration order within a group.
      def run_before_hooks_from_chain(ancestor_chain)
        ancestor_chain.each do |g|
          g.before_hooks.each { |h| instance_eval(&h) }
        end
      end

      # After hooks: innermost group first, reverse declaration order within a
      # group — mirror image of the before-hook ordering.
      def run_after_hooks_from_chain(ancestor_chain)
        ancestor_chain.reverse_each do |g|
          g.after_hooks.reverse_each { |h| instance_eval(&h) }
        end
      end

      # Mixes Capybara::DSL into this runner when the feature is enabled and
      # Capybara is actually loaded (it is an optional dependency).
      def extend_capybara_if_enabled!
        return unless Quick.capybara?
        return unless defined?(::Capybara)
        return unless defined?(::Capybara::DSL)

        extend ::Capybara::DSL
      end

      # Best-effort Capybara session reset after each example.
      def reset_capybara_if_enabled!
        return unless Quick.capybara?
        return unless defined?(::Capybara)

        ::Capybara.reset_sessions!
      rescue
        # Driver/session may be absent in non-Capybara runs
      end
    end
  end
end
|
|
@@ -0,0 +1,156 @@
|
|
|
1
|
+
require_relative "errors"
|
|
2
|
+
|
|
3
|
+
module Polyrun
  module Quick
    # Minimal +expect(x).to …+ chain (RSpec-ish) without RSpec.
    class Expectation
      def initialize(actual)
        @actual = actual
      end

      # Passes silently when +matcher+ accepts the wrapped value,
      # otherwise raises AssertionFailed with the matcher's message.
      def to(matcher)
        raise AssertionFailed, matcher.failure_message(@actual) unless matcher.matches?(@actual)
      end

      # Negated form of +to+.
      def not_to(matcher)
        raise AssertionFailed, matcher.failure_message_when_negated(@actual) unless matcher.does_not_match?(@actual)
      end
    end

    # Matcher factory methods mixed into example runners.
    module Matchers
      def expect(actual)
        Expectation.new(actual)
      end

      def eq(expected)
        EqMatcher.new(expected)
      end

      def be_truthy
        TruthyMatcher.new
      end

      def be_falsey
        FalseyMatcher.new
      end

      # NOTE: shadows Module#include inside example blocks by design.
      def include(*expected)
        IncludeMatcher.new(expected)
      end

      def match(pattern)
        RegexMatcher.new(pattern)
      end
    end

    # Equality via ==, with the expected value as the receiver.
    class EqMatcher
      def initialize(expected)
        @expected = expected
      end

      def matches?(actual)
        @expected == actual
      end

      def does_not_match?(actual)
        matches?(actual) ? false : true
      end

      def failure_message(actual)
        "expected #{@expected.inspect}, got #{actual.inspect}"
      end

      def failure_message_when_negated(actual)
        "expected #{actual.inspect} not to eq #{@expected.inspect}"
      end
    end

    # Accepts anything except nil/false.
    class TruthyMatcher
      def matches?(actual)
        actual ? true : false
      end

      def does_not_match?(actual)
        matches?(actual) ? false : true
      end

      def failure_message(actual)
        "expected truthy, got #{actual.inspect}"
      end

      def failure_message_when_negated(actual)
        "expected falsey, got #{actual.inspect}"
      end
    end

    # Accepts only nil/false.
    class FalseyMatcher
      def matches?(actual)
        actual ? false : true
      end

      def does_not_match?(actual)
        matches?(actual) ? false : true
      end

      def failure_message(actual)
        "expected falsey, got #{actual.inspect}"
      end

      def failure_message_when_negated(actual)
        "expected truthy, got #{actual.inspect}"
      end
    end

    # Delegates to the subject's #include? for every expected part.
    class IncludeMatcher
      def initialize(expected_parts)
        @expected_parts = expected_parts
      end

      def matches?(actual)
        return false unless actual.respond_to?(:include?)

        @expected_parts.each do |piece|
          return false unless actual.include?(piece)
        end
        true
      end

      def does_not_match?(actual)
        matches?(actual) ? false : true
      end

      def failure_message(actual)
        "expected #{actual.inspect} to include #{@expected_parts.map(&:inspect).join(", ")}"
      end

      def failure_message_when_negated(actual)
        "expected #{actual.inspect} not to include #{@expected_parts.map(&:inspect).join(", ")}"
      end
    end

    # Pattern match via === against the subject's string form
    # (so a String pattern means string equality, a Regexp means match).
    class RegexMatcher
      def initialize(pattern)
        @pattern = pattern
      end

      def matches?(actual)
        return false unless actual.respond_to?(:to_s)

        @pattern === actual.to_s
      end

      def does_not_match?(actual)
        matches?(actual) ? false : true
      end

      def failure_message(actual)
        "expected #{actual.inspect} to match #{@pattern.inspect}"
      end

      def failure_message_when_negated(actual)
        "expected #{actual.inspect} not to match #{@pattern.inspect}"
      end
    end
  end
end
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
module Polyrun
  module Quick
    # Tallies example outcomes and prints them to the given output/error IOs.
    class Reporter
      def initialize(out, err, verbose)
        @out = out
        @err = err
        @verbose = verbose
        @passed = 0
        @failed = 0
        @errors = 0
      end

      # Records a passing example; echoed only in verbose mode.
      def pass(group, description)
        @passed += 1
        @out.puts "  ok #{group} #{description}" if @verbose
      end

      # Records an assertion failure and prints it to the error stream.
      def fail(group, description, exc)
        @failed += 1
        @err.puts "  FAIL #{group} #{description}"
        @err.puts "    #{exc.message}"
      end

      # Records an unexpected exception, with class, message and top frame.
      def error(group, description, exc)
        @errors += 1
        @err.puts "  ERROR #{group} #{description}"
        @err.puts "    #{exc.class}: #{exc.message}"
        if (loc = exc.backtrace&.first)
          @err.puts "    #{loc}"
        end
      end

      # Prints the final tally and returns a shell exit code:
      # 0 when everything passed, 1 otherwise.
      def summary
        total = @passed + @failed + @errors
        @out.puts
        @out.puts "Polyrun::Quick: #{@passed} passed, #{@failed} failed, #{@errors} errors (#{total} examples)"
        @failed.zero? && @errors.zero? ? 0 : 1
      end
    end
  end
end
|