polyrun 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CODE_OF_CONDUCT.md +31 -0
- data/CONTRIBUTING.md +84 -0
- data/LICENSE +21 -0
- data/README.md +140 -0
- data/SECURITY.md +27 -0
- data/bin/polyrun +6 -0
- data/docs/SETUP_PROFILE.md +106 -0
- data/lib/polyrun/cli/coverage_commands.rb +150 -0
- data/lib/polyrun/cli/coverage_merge_io.rb +124 -0
- data/lib/polyrun/cli/database_commands.rb +149 -0
- data/lib/polyrun/cli/env_commands.rb +43 -0
- data/lib/polyrun/cli/helpers.rb +113 -0
- data/lib/polyrun/cli/init_command.rb +99 -0
- data/lib/polyrun/cli/plan_command.rb +134 -0
- data/lib/polyrun/cli/prepare_command.rb +71 -0
- data/lib/polyrun/cli/prepare_recipe.rb +77 -0
- data/lib/polyrun/cli/queue_command.rb +101 -0
- data/lib/polyrun/cli/quick_command.rb +13 -0
- data/lib/polyrun/cli/report_commands.rb +94 -0
- data/lib/polyrun/cli/run_shards_command.rb +88 -0
- data/lib/polyrun/cli/run_shards_plan_boot_phases.rb +91 -0
- data/lib/polyrun/cli/run_shards_plan_options.rb +45 -0
- data/lib/polyrun/cli/run_shards_planning.rb +124 -0
- data/lib/polyrun/cli/run_shards_run.rb +168 -0
- data/lib/polyrun/cli/start_bootstrap.rb +99 -0
- data/lib/polyrun/cli/timing_command.rb +31 -0
- data/lib/polyrun/cli.rb +184 -0
- data/lib/polyrun/config.rb +61 -0
- data/lib/polyrun/coverage/cobertura_zero_lines.rb +32 -0
- data/lib/polyrun/coverage/collector.rb +184 -0
- data/lib/polyrun/coverage/collector_finish.rb +95 -0
- data/lib/polyrun/coverage/filter.rb +22 -0
- data/lib/polyrun/coverage/formatter.rb +115 -0
- data/lib/polyrun/coverage/merge/formatters.rb +181 -0
- data/lib/polyrun/coverage/merge/formatters_html.rb +55 -0
- data/lib/polyrun/coverage/merge.rb +127 -0
- data/lib/polyrun/coverage/merge_fragment_meta.rb +47 -0
- data/lib/polyrun/coverage/merge_merge_two.rb +117 -0
- data/lib/polyrun/coverage/rails.rb +128 -0
- data/lib/polyrun/coverage/reporting.rb +41 -0
- data/lib/polyrun/coverage/result.rb +18 -0
- data/lib/polyrun/coverage/track_files.rb +141 -0
- data/lib/polyrun/data/cached_fixtures.rb +122 -0
- data/lib/polyrun/data/factory_counts.rb +35 -0
- data/lib/polyrun/data/factory_instrumentation.rb +50 -0
- data/lib/polyrun/data/fixtures.rb +68 -0
- data/lib/polyrun/data/parallel_provisioning.rb +93 -0
- data/lib/polyrun/data/snapshot.rb +84 -0
- data/lib/polyrun/database/clone_shards.rb +81 -0
- data/lib/polyrun/database/provision.rb +72 -0
- data/lib/polyrun/database/shard.rb +63 -0
- data/lib/polyrun/database/url_builder/connection/infer.rb +49 -0
- data/lib/polyrun/database/url_builder/connection/url_builders.rb +43 -0
- data/lib/polyrun/database/url_builder/connection.rb +191 -0
- data/lib/polyrun/database/url_builder/template_prepare.rb +21 -0
- data/lib/polyrun/database/url_builder.rb +160 -0
- data/lib/polyrun/debug.rb +81 -0
- data/lib/polyrun/env/ci.rb +65 -0
- data/lib/polyrun/log.rb +70 -0
- data/lib/polyrun/minitest.rb +17 -0
- data/lib/polyrun/partition/constraints.rb +69 -0
- data/lib/polyrun/partition/hrw.rb +33 -0
- data/lib/polyrun/partition/min_heap.rb +64 -0
- data/lib/polyrun/partition/paths.rb +28 -0
- data/lib/polyrun/partition/paths_build.rb +128 -0
- data/lib/polyrun/partition/plan.rb +189 -0
- data/lib/polyrun/partition/plan_lpt.rb +49 -0
- data/lib/polyrun/partition/plan_sharding.rb +48 -0
- data/lib/polyrun/partition/stable_shuffle.rb +18 -0
- data/lib/polyrun/prepare/artifacts.rb +40 -0
- data/lib/polyrun/prepare/assets.rb +57 -0
- data/lib/polyrun/queue/file_store.rb +199 -0
- data/lib/polyrun/queue/file_store_pending.rb +48 -0
- data/lib/polyrun/quick/assertions.rb +32 -0
- data/lib/polyrun/quick/errors.rb +6 -0
- data/lib/polyrun/quick/example_group.rb +66 -0
- data/lib/polyrun/quick/example_runner.rb +93 -0
- data/lib/polyrun/quick/matchers.rb +156 -0
- data/lib/polyrun/quick/reporter.rb +42 -0
- data/lib/polyrun/quick/runner.rb +180 -0
- data/lib/polyrun/quick.rb +1 -0
- data/lib/polyrun/railtie.rb +7 -0
- data/lib/polyrun/reporting/junit.rb +125 -0
- data/lib/polyrun/reporting/junit_emit.rb +58 -0
- data/lib/polyrun/reporting/rspec_junit.rb +39 -0
- data/lib/polyrun/rspec.rb +15 -0
- data/lib/polyrun/templates/POLYRUN.md +45 -0
- data/lib/polyrun/templates/ci_matrix.polyrun.yml +14 -0
- data/lib/polyrun/templates/minimal_gem.polyrun.yml +13 -0
- data/lib/polyrun/templates/rails_prepare.polyrun.yml +31 -0
- data/lib/polyrun/timing/merge.rb +35 -0
- data/lib/polyrun/timing/summary.rb +25 -0
- data/lib/polyrun/version.rb +3 -0
- data/lib/polyrun.rb +58 -0
- data/polyrun.gemspec +37 -0
- data/sig/polyrun/cli.rbs +6 -0
- data/sig/polyrun/config.rbs +20 -0
- data/sig/polyrun/debug.rbs +12 -0
- data/sig/polyrun/log.rbs +12 -0
- data/sig/polyrun/minitest.rbs +5 -0
- data/sig/polyrun/quick.rbs +19 -0
- data/sig/polyrun/rspec.rbs +5 -0
- data/sig/polyrun.rbs +11 -0
- metadata +288 -0
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
module Polyrun
  module Database
    module UrlBuilder
      # Infer canonical adapter name from +polyrun.yml+ +databases:+ hash.
      module ConnectionInfer
        module_function

        # Ordered (nested-key, canonical-adapter) pairs; first nested hash wins.
        INFER_ADAPTER_FROM_NESTED = [
          %w[postgresql postgresql],
          %w[trilogy trilogy],
          %w[mysql2 mysql2],
          %w[mysql mysql2],
          %w[sqlserver sqlserver],
          %w[mssql sqlserver],
          %w[sqlite3 sqlite3],
          %w[sqlite sqlite3],
          %w[mongodb mongodb],
          %w[mongo mongodb]
        ].freeze

        # Canonical adapter name for a +databases:+ hash.
        # Precedence: explicit +adapter:+ (aliases normalized) → first nested
        # adapter block found → "postgresql" as the default.
        def infer_adapter_name(dh)
          declared = normalize_adapter_alias((dh["adapter"] || dh[:adapter]).to_s.strip.downcase)
          return declared unless declared.empty?

          hit = INFER_ADAPTER_FROM_NESTED.find { |key, _canonical| nested_hash?(dh, key) }
          hit ? hit[1] : "postgresql"
        end

        # Maps common adapter aliases to their canonical names; unknown names
        # pass through unchanged.
        def normalize_adapter_alias(name)
          {
            "postgres" => "postgresql",
            "pg" => "postgresql",
            "mysql" => "mysql2",
            "mongo" => "mongodb",
            "mssql" => "sqlserver",
            "sqlite" => "sqlite3"
          }.fetch(name, name)
        end

        # True when +dh+ has a Hash under +key+ as either a string or symbol key.
        def nested_hash?(dh, key)
          dh[key].is_a?(Hash) || dh[key.to_sym].is_a?(Hash)
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
require "uri"

module Polyrun
  module Database
    module UrlBuilder
      # String construction for +DATABASE_URL+ values.
      module ConnectionUrlBuilders
        module_function

        # sqlite3 URLs carry only the database path — no authority section.
        def build_sqlite_url(database)
          "sqlite3:#{database}"
        end

        # Builds +scheme://[user[:password]@]host:port/database+ with the
        # userinfo form-encoded. Password is included only when non-empty;
        # userinfo is omitted entirely when the user is empty and there is
        # no password.
        def build_url_authority(scheme, host, port, user, password, database)
          userinfo =
            if password && !password.to_s.empty?
              "#{URI.encode_www_form_component(user.to_s)}:#{URI.encode_www_form_component(password.to_s)}@"
            elsif !user.to_s.empty?
              "#{URI.encode_www_form_component(user.to_s)}@"
            else
              ""
            end
          "#{scheme}://#{userinfo}#{host}:#{port}/#{database}"
        end

        # Builds a +mongodb://+ URL; credentials are included only when a
        # user is present, and the password only when non-empty.
        def build_mongodb_url(host, port, user, password, database)
          return "mongodb://#{host}:#{port}/#{database}" if user.to_s.empty?

          creds = URI.encode_www_form_component(user.to_s)
          creds += ":#{URI.encode_www_form_component(password.to_s)}" if password && !password.to_s.empty?
          "mongodb://#{creds}@#{host}:#{port}/#{database}"
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,191 @@
|
|
|
1
|
+
require_relative "connection/infer"
require_relative "connection/url_builders"

module Polyrun
  module Database
    module UrlBuilder
      # Adapter detection, ENV fallbacks, and URL string construction (Rails +DATABASE_URL+ conventions).
      module Connection
        module_function

        # Resolves connection coordinates for the configured adapter.
        # Per-field precedence: explicit value in +databases:+ (top-level or
        # nested adapter block, via +merged_connection_block+) → first
        # non-empty ENV var from the adapter profile → profile default.
        # Returns {scheme:, host:, port:, user:, password:} — all Strings
        # except +:password+, which stays nil when unset.
        def resolve_connection(databases_hash)
          dh = normalize_hash(databases_hash)
          profile = connection_profile(dh)
          blk = merged_connection_block(dh, profile)
          scheme = profile[:scheme]
          host = (blk["host"] || env_first(profile[:env_host]) || profile[:default_host]).to_s
          port = (blk["port"] || env_first(profile[:env_port]) || profile[:default_port]).to_s
          user = (blk["username"] || blk["user"] || env_first(profile[:env_user]) || profile[:default_user]).to_s
          password = blk["password"] || env_first(profile[:env_password])

          {
            scheme: scheme,
            host: host,
            port: port,
            user: user,
            password: password
          }
        end

        # Renders a URL string for +database+ from a +resolve_connection+
        # result. Dispatches on +conn[:scheme]+ to the matching builder;
        # raises Polyrun::Error for any scheme not produced by
        # +connection_profile+.
        def build_database_url(database, conn)
          scheme = conn[:scheme].to_s
          host = conn[:host].to_s
          port = conn[:port].to_s
          user = conn[:user].to_s
          password = conn[:password]
          db = database.to_s

          case scheme
          when "postgres"
            ConnectionUrlBuilders.build_url_authority("postgres", host, port, user, password, db)
          when "mysql2", "trilogy"
            ConnectionUrlBuilders.build_url_authority(scheme, host, port, user, password, db)
          when "sqlserver"
            ConnectionUrlBuilders.build_url_authority("sqlserver", host, port, user, password, db)
          when "mongodb"
            ConnectionUrlBuilders.build_mongodb_url(host, port, user, password, db)
          when "sqlite3"
            # sqlite has no authority section — only the database path is used.
            ConnectionUrlBuilders.build_sqlite_url(db)
          else
            raise Polyrun::Error, "unsupported URL scheme: #{scheme.inspect}"
          end
        end

        # Per-adapter profile: URL scheme, the nested config key to merge from,
        # defaults, and the ordered ENV fallback names for each field.
        # +nested_key+ tracks whichever alias the user actually wrote (e.g.
        # "mssql" vs "sqlserver") so +merged_connection_block+ finds it.
        def connection_profile(dh)
          name = ConnectionInfer.infer_adapter_name(dh)
          case name
          when "postgresql"
            {
              scheme: "postgres",
              nested_key: "postgresql",
              default_host: "localhost",
              default_port: "5432",
              default_user: "postgres",
              env_host: %w[PGHOST],
              env_port: %w[PGPORT],
              env_user: %w[PGUSER],
              env_password: %w[PGPASSWORD]
            }
          when "mysql2"
            nested_mysql_key(dh)
          when "trilogy"
            {
              scheme: "trilogy",
              nested_key: "trilogy",
              default_host: "localhost",
              default_port: "3306",
              default_user: "root",
              env_host: %w[MYSQL_HOST MYSQL_ADDRESS TRILOGY_HOST],
              env_port: %w[MYSQL_PORT MYSQL_TCP_PORT TRILOGY_PORT],
              env_user: %w[MYSQL_USER MYSQL_USERNAME TRILOGY_USER],
              env_password: %w[MYSQL_PASSWORD MYSQL_PWD TRILOGY_PASSWORD]
            }
          when "sqlserver"
            # Prefer the "sqlserver" nested block when present, else "mssql".
            nested =
              if ConnectionInfer.nested_hash?(dh, "sqlserver")
                "sqlserver"
              else
                "mssql"
              end
            {
              scheme: "sqlserver",
              nested_key: nested,
              default_host: "localhost",
              default_port: "1433",
              default_user: "sa",
              env_host: %w[SQLSERVER_HOST MSSQL_HOST TDS_HOST],
              env_port: %w[SQLSERVER_PORT MSSQL_PORT TDS_PORT],
              env_user: %w[SQLSERVER_USER MSSQL_USER SA_USER],
              env_password: %w[SQLSERVER_PASSWORD MSSQL_PASSWORD SA_PASSWORD]
            }
          when "sqlite3"
            # sqlite is file-based: no host/port/user defaults, no ENV fallbacks.
            nested =
              if ConnectionInfer.nested_hash?(dh, "sqlite3")
                "sqlite3"
              else
                "sqlite"
              end
            {
              scheme: "sqlite3",
              nested_key: nested,
              default_host: "",
              default_port: "",
              default_user: "",
              env_host: [],
              env_port: [],
              env_user: [],
              env_password: []
            }
          when "mongodb"
            nested_mongo_key(dh)
          else
            raise Polyrun::Error, "unsupported databases.adapter: #{name.inspect}"
          end
        end

        # mysql2 profile; nested block may be written as "mysql2" or "mysql".
        def nested_mysql_key(dh)
          nested =
            if ConnectionInfer.nested_hash?(dh, "mysql2")
              "mysql2"
            else
              "mysql"
            end
          {
            scheme: "mysql2",
            nested_key: nested,
            default_host: "localhost",
            default_port: "3306",
            default_user: "root",
            env_host: %w[MYSQL_HOST MYSQL_ADDRESS],
            env_port: %w[MYSQL_PORT MYSQL_TCP_PORT],
            env_user: %w[MYSQL_USER MYSQL_USERNAME],
            env_password: %w[MYSQL_PASSWORD MYSQL_PWD]
          }
        end

        # mongodb profile; nested block may be written as "mongodb" or "mongo".
        # No default user: anonymous connections are valid for MongoDB.
        def nested_mongo_key(dh)
          nested =
            if ConnectionInfer.nested_hash?(dh, "mongodb")
              "mongodb"
            else
              "mongo"
            end
          {
            scheme: "mongodb",
            nested_key: nested,
            default_host: "localhost",
            default_port: "27017",
            default_user: "",
            env_host: %w[MONGO_HOST MONGODB_HOST],
            env_port: %w[MONGO_PORT MONGODB_PORT],
            env_user: %w[MONGO_USER MONGODB_USER],
            env_password: %w[MONGO_PASSWORD MONGODB_PASSWORD]
          }
        end

        # Merges the adapter's nested config block with top-level connection
        # keys; top-level values win (hence nested.merge(top)). Keys are
        # normalized to strings.
        def merged_connection_block(dh, profile)
          nk = profile[:nested_key]
          nested = dh[nk].is_a?(Hash) ? normalize_hash(dh[nk]) : {}
          top = dh.slice("host", "port", "username", "user", "password").transform_keys(&:to_s)
          nested.merge(top)
        end

        # String-keyed copy of +h+; empty hash for any non-Hash input.
        def normalize_hash(h)
          h.is_a?(Hash) ? h.transform_keys(&:to_s) : {}
        end

        # First non-empty ENV value among +keys+ (in order), or nil.
        def env_first(keys)
          Array(keys).each do |k|
            v = ENV[k]
            return v if present?(v)
          end
          nil
        end

        # True for any value whose string form is non-empty.
        def present?(s)
          !s.nil? && !s.to_s.empty?
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
module Polyrun
  module Database
    module UrlBuilder
      module_function

      # Adds one +DATABASE_URL_<NAME>+ entry to +out+ for each entry in
      # +databases.connections+, pointing at that connection's template DB
      # (falling back to +primary_template+). An explicit +env_key+ on the
      # connection overrides the derived key; entries with neither a name nor
      # an env_key are skipped. Mutates and returns +out+.
      def template_prepare_env_fill_connections!(dh, primary_template, out)
        connections = Array(dh["connections"] || dh[:connections])
        connections.each do |conn|
          name = (conn["name"] || conn[:name]).to_s
          env_key = (conn["env_key"] || conn[:env_key]).to_s.strip
          if env_key.empty? && !name.empty?
            env_key = "DATABASE_URL_#{name.upcase.tr("-", "_")}"
          end
          next if env_key.empty?

          template = (conn["template_db"] || conn[:template_db]).to_s
          template = primary_template if template.empty?
          out[env_key] = url_for_database_name(dh, template)
        end
        out
      end
    end
  end
end
|
|
@@ -0,0 +1,160 @@
|
|
|
1
|
+
require "shellwords"
require "uri"
require_relative "url_builder/connection"

module Polyrun
  module Database
    # Builds database URLs from +polyrun.yml+ +databases+ (spec2 §5.2) — no Liquid; use ENV fallbacks.
    # Use +adapter:+ or nested blocks: +postgresql+, +mysql+ / +mysql2+ / +trilogy+, +sqlserver+ / +mssql+,
    # +sqlite3+ / +sqlite+, +mongodb+ / +mongo+.
    module UrlBuilder
      module_function

      # Back-compat alias (pre-multi-adapter naming) for +url_for_template+.
      def postgres_url_for_template(databases_hash)
        url_for_template(databases_hash)
      end

      # Back-compat alias for +url_for_database_name+.
      def postgres_url_for_database_name(databases_hash, database_name)
        url_for_database_name(databases_hash, database_name)
      end

      # Back-compat alias for +url_for_shard+.
      def postgres_url_for_shard(databases_hash, shard_index:, connection: nil)
        url_for_shard(databases_hash, shard_index: shard_index, connection: connection)
      end

      # URL for the template database named by +databases.template_db+.
      # Raises Polyrun::Error when the key is missing or blank.
      def url_for_template(databases_hash)
        dh = databases_hash.is_a?(Hash) ? databases_hash : {}
        dbname = dh["template_db"] || dh[:template_db]
        raise Polyrun::Error, "databases.template_db is required" if dbname.nil? || dbname.to_s.empty?

        url_for_database_name(dh, dbname.to_s)
      end

      # URL for an arbitrary database name using the resolved connection
      # (adapter detection + ENV fallbacks from Connection).
      def url_for_database_name(databases_hash, database_name)
        conn = Connection.resolve_connection(databases_hash)
        Connection.build_database_url(database_name.to_s, conn)
      end

      # ENV overrides so +bin/rails db:prepare+ runs once for multi-DB apps: each connection keeps its own
      # +migrations_paths+ (e.g. db/cache_migrate) instead of pointing DATABASE_URL at every template in turn.
      # Returns the ENV hash ({"DATABASE_URL" => ..., "DATABASE_URL_<NAME>" => ...}).
      def template_prepare_env(databases_hash)
        dh = databases_hash.is_a?(Hash) ? databases_hash : {}
        pt = (dh["template_db"] || dh[:template_db]).to_s
        raise Polyrun::Error, "template_prepare_env: set databases.template_db" if pt.empty?

        out = {}
        out["DATABASE_URL"] = url_for_database_name(dh, pt)
        template_prepare_env_fill_connections!(dh, pt, out)
      end

      # Shell-quoted, sorted "KEY=value" rendering of +template_prepare_env+ for log lines.
      def template_prepare_env_shell_log(databases_hash)
        template_prepare_env(databases_hash).sort.map { |k, v| "#{k}=#{Shellwords.escape(v.to_s)}" }.join(" ")
      end

      # URLs of every distinct template database (primary first, then each
      # connection's, de-duplicated by template name).
      def unique_template_migrate_urls(databases_hash)
        dh = databases_hash.is_a?(Hash) ? databases_hash : {}
        seen = {}
        out = []
        pt = (dh["template_db"] || dh[:template_db]).to_s
        unless pt.empty?
          out << url_for_database_name(dh, pt)
          seen[pt] = true
        end
        Array(dh["connections"] || dh[:connections]).each do |c|
          t = (c["template_db"] || c[:template_db]).to_s
          t = pt if t.empty?
          next if t.empty?
          next if seen[t]

          out << url_for_database_name(dh, t)
          seen[t] = true
        end
        out
      end

      # Database name (not URL) a given shard/connection resolves to.
      def shard_database_name(databases_hash, shard_index:, connection: nil)
        extract_db_name(url_for_shard(databases_hash, shard_index: shard_index, connection: connection))
      end

      # Template database name for the primary connection (connection: nil) or
      # a named connection (its own template_db, else the primary's). Returns
      # "" when the named connection does not exist.
      def template_database_name_for(databases_hash, connection: nil)
        dh = databases_hash.is_a?(Hash) ? databases_hash : {}
        if connection.nil?
          return (dh["template_db"] || dh[:template_db]).to_s
        end

        c = Array(dh["connections"] || dh[:connections]).find { |x| (x["name"] || x[:name]).to_s == connection.to_s }
        return "" unless c

        (c["template_db"] || c[:template_db] || dh["template_db"] || dh[:template_db]).to_s
      end

      # Rows of {new_db:, template_db:} for every clone needed by a shard —
      # primary connection first, then each named connection. Rows with a
      # blank shard or template name are skipped.
      def shard_database_plan(databases_hash, shard_index:)
        dh = databases_hash.is_a?(Hash) ? databases_hash : {}
        rows = []
        primary_shard = shard_database_name(dh, shard_index: shard_index, connection: nil)
        primary_tmpl = template_database_name_for(dh, connection: nil)
        if !primary_shard.empty? && !primary_tmpl.empty?
          rows << {new_db: primary_shard, template_db: primary_tmpl}
        end

        Array(dh["connections"] || dh[:connections]).each do |c|
          nm = (c["name"] || c[:name]).to_s
          next if nm.empty?

          sname = shard_database_name(dh, shard_index: shard_index, connection: nm)
          tname = template_database_name_for(dh, connection: nm)
          rows << {new_db: sname, template_db: tname} if !sname.empty? && !tname.empty?
        end
        rows
      end

      # URL for shard +shard_index+. The database name comes from
      # +shard_db_pattern+ (per-connection pattern wins over the top-level
      # one), expanding both "%{shard}" and "%<shard>d" placeholders;
      # defaults to "app_test_%{shard}".
      def url_for_shard(databases_hash, shard_index:, connection: nil)
        dh = databases_hash.is_a?(Hash) ? databases_hash : {}
        conn = Connection.resolve_connection(dh)
        pattern =
          if connection
            conns = dh["connections"] || dh[:connections] || []
            c = Array(conns).find { |x| (x["name"] || x[:name]).to_s == connection.to_s }
            # Fix: the top-level fallback must honor symbol keys too, matching
            # the no-connection branch below (previously only dh["shard_db_pattern"]).
            (c && (c["shard_db_pattern"] || c[:shard_db_pattern])) || dh["shard_db_pattern"] || dh[:shard_db_pattern]
          else
            dh["shard_db_pattern"] || dh[:shard_db_pattern]
          end
        pattern ||= "app_test_%{shard}"

        dbname = pattern.to_s.gsub("%{shard}", Integer(shard_index).to_s).gsub("%<shard>d", format("%d", Integer(shard_index)))
        Connection.build_database_url(dbname, conn)
      end

      # ENV hash for a shard: DATABASE_URL + TEST_DB_NAME for the primary
      # connection, plus DATABASE_URL_<NAME> (or the connection's env_key)
      # for each named connection.
      def env_exports_for_databases(databases_hash, shard_index:)
        dh = databases_hash.is_a?(Hash) ? databases_hash : {}
        out = {}
        primary_url = url_for_shard(dh, shard_index: shard_index)
        out["DATABASE_URL"] = primary_url
        out["TEST_DB_NAME"] = extract_db_name(primary_url)

        conns = dh["connections"] || dh[:connections] || []
        Array(conns).each do |c|
          name = (c["name"] || c[:name]).to_s
          next if name.empty?

          u = url_for_shard(dh, shard_index: shard_index, connection: name)
          key = (c["env_key"] || c[:env_key]).to_s.strip
          key = "DATABASE_URL_#{name.upcase.tr("-", "_")}" if key.empty?
          out[key] = u
        end
        out
      end

      # Database name from a URL: the path after sqlite3's scheme, or the
      # URI path (query stripped) for authority-form URLs. nil on unparseable
      # input.
      def extract_db_name(url)
        s = url.to_s
        return s.sub(/\Asqlite3:/i, "") if s.match?(/\Asqlite3:/i)

        URI.parse(s).path.delete_prefix("/").split("?", 2).first
      rescue URI::InvalidURIError
        nil
      end
    end
  end
end

require_relative "url_builder/template_prepare"
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
module Polyrun
  # Opt-in tracing: set +DEBUG=1+ or +POLYRUN_DEBUG=1+ (or +true+).
  # Logs to stderr with wall-clock timestamps; use +.time+ for monotonic durations.
  module Debug
    module_function

    # True when either DEBUG or POLYRUN_DEBUG is set to 1/true/yes/on.
    def enabled?
      truthy?(ENV["DEBUG"]) || truthy?(ENV["POLYRUN_DEBUG"])
    end

    # Emits a timestamped debug line to the Log stderr sink; no-op when disabled.
    def log(message)
      return unless enabled?

      Polyrun::Log.warn "[polyrun debug #{wall_clock}] #{message}"
    end

    # Logs a hash as "key=value" pairs (values via #inspect); no-op when disabled.
    # The early return avoids building the string when tracing is off.
    def log_kv(pairs)
      return unless enabled?

      log(pairs.map { |k, v| "#{k}=#{v.inspect}" }.join(" "))
    end

    # Same as +log+, but tags lines from parallel RSpec workers so they are not confused with the parent +polyrun+ process (stderr interleaves arbitrarily).
    def log_worker(message)
      return unless enabled?

      if parallel_worker_process?
        Polyrun::Log.warn "[polyrun debug #{wall_clock}] [worker pid=#{$$} shard=#{ENV.fetch("POLYRUN_SHARD_INDEX", "?")}] #{message}"
      else
        Polyrun::Log.warn "[polyrun debug #{wall_clock}] #{message}"
      end
    end

    # key=value variant of +log_worker+: prefixes role/pid/shard pairs when
    # running inside a parallel worker.
    def log_worker_kv(pairs)
      return unless enabled?

      if parallel_worker_process?
        log_kv({role: "worker", pid: $$, shard: ENV["POLYRUN_SHARD_INDEX"]}.merge(pairs))
      else
        log_kv(pairs)
      end
    end

    # Heuristic: a shard total > 1 means this process is one of several workers.
    def parallel_worker_process?
      ENV["POLYRUN_SHARD_TOTAL"].to_i > 1
    end

    # Yields and logs monotonic duration; re-raises after logging failures.
    def time(label)
      # t0 stays nil when tracing is disabled, so the rescue below can tell
      # whether timing ever started before logging a failure duration.
      t0 = nil
      return yield unless enabled?

      t0 = Process.clock_gettime(Process::CLOCK_MONOTONIC)
      log("#{label} … start")
      result = yield
      elapsed = Process.clock_gettime(Process::CLOCK_MONOTONIC) - t0
      elapsed_s = format("%0.3f", elapsed)
      log("#{label} … done in #{elapsed_s}s")
      result
    rescue => e
      if t0
        elapsed = Process.clock_gettime(Process::CLOCK_MONOTONIC) - t0
        elapsed_s = format("%0.3f", elapsed)
        log("#{label} … failed after #{elapsed_s}s: #{e.class}: #{e.message}")
      end
      raise
    end

    # Local wall-clock time with microsecond precision, e.g. "14:03:07.123456".
    def wall_clock
      Time.now.getlocal.strftime("%H:%M:%S.%6N")
    end

    # Interprets common truthy env-var spellings; nil and everything else are false.
    def truthy?(value)
      return false if value.nil?

      v = value.to_s.strip.downcase
      %w[1 true yes on].include?(v)
    end
    private_class_method :truthy?
  end
end
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
module Polyrun
  module Env
    # CI-native shard index/total (spec2 §6.4) without extra gems.
    module Ci
      module_function

      # Returns Integer shard index or nil if not inferable from CI env.
      # POLYRUN_SHARD_INDEX always wins; the CI-provider vars are honored
      # only when CI itself is truthy. Unparseable values yield nil.
      def detect_shard_index
        return Integer(ENV["POLYRUN_SHARD_INDEX"]) if present?(ENV["POLYRUN_SHARD_INDEX"])

        first_ci_integer(%w[CI_NODE_INDEX BUILDKITE_PARALLEL_JOB CIRCLE_NODE_INDEX])
      rescue ArgumentError, TypeError
        nil
      end

      # Returns Integer shard total or nil.
      def detect_shard_total
        return Integer(ENV["POLYRUN_SHARD_TOTAL"]) if present?(ENV["POLYRUN_SHARD_TOTAL"])

        first_ci_integer(%w[CI_NODE_TOTAL BUILDKITE_PARALLEL_JOB_COUNT CIRCLE_NODE_TOTAL])
      rescue ArgumentError, TypeError
        nil
      end

      # Integer value of the first non-empty env var among +keys+, provided
      # CI is truthy; nil otherwise. Integer() may raise — callers rescue.
      def first_ci_integer(keys)
        return nil unless truthy?(ENV["CI"])

        keys.each do |key|
          value = ENV[key]
          return Integer(value) if present?(value)
        end
        nil
      end

      # Environment label: explicit POLYRUN_ENV, else "ci" under CI, else "local".
      def polyrun_env
        explicit = ENV["POLYRUN_ENV"]&.strip
        return explicit if present?(explicit)

        truthy?(ENV["CI"]) ? "ci" : "local"
      end

      # True for any value whose string form is non-empty.
      def present?(s)
        !(s.nil? || s.to_s.empty?)
      end

      # Interprets common truthy env-var spellings (1/true/yes).
      def truthy?(s)
        %w[1 true yes].include?(s.to_s.downcase)
      end
    end
  end
end
|
data/lib/polyrun/log.rb
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
# rubocop:disable ThreadSafety/ClassAndModuleAttributes, ThreadSafety/ClassInstanceVariable -- process-global IO routing for CLI
module Polyrun
  # Swappable sinks for CLI and library output. Defaults match +Kernel#warn+ (stderr) and +puts+/+print+ (stdout).
  #
  # Assign an IO, +StringIO+, Ruby +Logger+, or any object responding to +puts+, +write+, or +warn+ (Logger).
  #
  #   Polyrun::Log.stderr = Logger.new($stderr)
  #   Polyrun::Log.stdout = StringIO.new
  module Log
    class << self
      attr_writer :stderr, :stdout

      # Current stderr sink; falls back to the global $stderr when unset.
      def stderr
        @stderr || $stderr
      end

      # Current stdout sink; falls back to the global $stdout when unset.
      def stdout
        @stdout || $stdout
      end

      # Writes one line to the stderr sink. A nil message is silently dropped
      # (matches Kernel#warn with no args).
      def warn(msg = nil)
        emit_line(stderr, msg) unless msg.nil?
      end

      # Writes one line to the stdout sink; nil becomes a bare newline
      # (matches Kernel#puts(nil) producing an empty line).
      def puts(msg = "")
        return stdout.write("\n") if msg.nil?

        emit_line(stdout, msg)
      end

      # Writes +msg+ to the stdout sink without a trailing newline, using
      # whichever of #write / #print the sink supports.
      def print(msg = "")
        sink = stdout
        text = msg.to_s
        if sink.respond_to?(:write)
          sink.write(text)
        elsif sink.respond_to?(:print)
          sink.print(text)
        end
      end

      # Clears custom sinks so +stderr+ / +stdout+ resolve to the current global +$stderr+ / +$stdout+ (e.g. after tests).
      def reset_io!
        @stderr = nil
        @stdout = nil
      end

      private

      # Routes one message to +io+: Logger-likes get #warn (chomped, Logger
      # adds its own terminator), puts-capable objects get #puts, and raw
      # writers get #write with a guaranteed trailing newline.
      def emit_line(io, msg)
        text = msg.to_s
        return io.warn(text.chomp) if logger_like?(io)
        return io.puts(text) if io.respond_to?(:puts)

        io.write(text.end_with?("\n") ? text : "#{text}\n")
      end

      # Non-IO objects exposing #warn (e.g. Logger) — real IOs must not be
      # routed through Kernel#warn semantics.
      def logger_like?(io)
        !io.is_a?(IO) && io.respond_to?(:warn)
      end
    end
  end
end
# rubocop:enable ThreadSafety/ClassAndModuleAttributes, ThreadSafety/ClassInstanceVariable
|