peak_flow_utils 0.1.12 → 0.1.16
- checksums.yaml +4 -4
- data/app/controllers/peak_flow_utils/pings/postgres_connections_controller.rb +1 -0
- data/app/controllers/peak_flow_utils/pings/sidekiq_controller.rb +1 -0
- data/app/services/peak_flow_utils/database_initializer_service.rb +3 -3
- data/app/services/peak_flow_utils/deep_merger.rb +62 -0
- data/app/services/peak_flow_utils/handlers_finder_service.rb +2 -2
- data/app/services/peak_flow_utils/translations_parser_service.rb +3 -3
- data/lib/peak_flow_utils/inherited_local_var.rb +84 -0
- data/lib/peak_flow_utils/migrations/{20150907090900_create_handlers.rb → 20150907070908_create_handlers.rb} +0 -0
- data/lib/peak_flow_utils/notifier.rb +73 -26
- data/lib/peak_flow_utils/notifier_rack.rb +6 -6
- data/lib/peak_flow_utils/thread_callbacks_patch.rb +23 -0
- data/lib/peak_flow_utils/version.rb +1 -1
- data/lib/peak_flow_utils.rb +6 -3
- metadata +37 -9
- data/bin/peak_flow_rspec_files +0 -21
- data/lib/peak_flow_utils/rspec_helper.rb +0 -195
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c01a814a1aad134f668f7144f6addea59f8b8afdc75a2ae8ff2280dbe3a9867c
+  data.tar.gz: b6746741a1ff204c7b2a31319409d84a4529c14a55f8e9cfbf7b7fcad37eacea
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5a82e952b87453cfde4fbcb42fd43a0f2bca93f43dc2b3dbdca3109c6d25a21dd587745c602680d89c446cda1c1be251167b46248b98bc1e5354f408ae9f2758
+  data.tar.gz: d2020073122505feaa6cb3923f79a76ff6f8608ac45af16625cae83f79c6232a9f4ac677e834faa015bfd1d9e287946b4cc0662557507dcfab89ea7c3faf665e

data/app/controllers/peak_flow_utils/pings/postgres_connections_controller.rb
CHANGED
@@ -3,6 +3,7 @@ class PeakFlowUtils::Pings::PostgresConnectionsController < PeakFlowUtils::Appli
     postgres_connections_count = ActiveRecord::Base.connection.execute("SELECT SUM(numbackends) AS connections_count FROM pg_stat_database").to_a.first
 
     render json: {
+      check_json_status: "OK",
       postgres_connections_count: postgres_connections_count.fetch("connections_count")
     }
   end
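
For reference, the shape of the JSON this ping now renders (the connection count value below is illustrative, not from the diff):

{
  check_json_status: "OK",
  postgres_connections_count: 42
}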

data/app/services/peak_flow_utils/database_initializer_service.rb
CHANGED
@@ -1,6 +1,6 @@
 class PeakFlowUtils::DatabaseInitializerService < PeakFlowUtils::ApplicationService
-  def
-    path = File.realpath("#{
+  def perform
+    path = File.realpath("#{__dir__}/../../../lib/peak_flow_utils/migrations")
     create_schema_table unless schema_table_exists?
 
     Dir["#{path}/[0-9]*_*.rb"].sort.map do |filename|
@@ -22,7 +22,7 @@ class PeakFlowUtils::DatabaseInitializerService < PeakFlowUtils::ApplicationServ
 private
 
   def create_schema_table
-    PeakFlowUtils::ApplicationRecord.connection.execute("CREATE TABLE schema_migrations (version
+    PeakFlowUtils::ApplicationRecord.connection.execute("CREATE TABLE schema_migrations (version VARCHAR)")
   end
 
   def register_migration_migrated(version)

data/app/services/peak_flow_utils/deep_merger.rb
ADDED
@@ -0,0 +1,62 @@
+class PeakFlowUtils::DeepMerger < PeakFlowUtils::ApplicationService
+  attr_reader :hashes, :object_mappings
+
+  def initialize(hashes:, object_mappings: {})
+    @hashes = hashes
+    @object_mappings = object_mappings
+  end
+
+  def perform
+    merged = {}
+
+    hashes.each do |hash|
+      merge_hash(hash, merged)
+    end
+
+    succeed! merged
+  end
+
+  def clone_something(object)
+    if object.is_a?(Hash)
+      new_hash = {}
+      merge_hash(object, new_hash)
+      new_hash
+    elsif object.is_a?(Array)
+      new_array = []
+      merge_array(object, new_array)
+      new_array
+    else
+      object
+    end
+  end
+
+  def merge_something(object, merged)
+    if object.is_a?(Array)
+      merge_array(object, merged)
+    elsif object.is_a?(Hash)
+      merge_hash(object, merged)
+    else
+      raise "Unknown object: #{object.class.name}"
+    end
+  end
+
+  def merge_array(array, merged)
+    array.each do |value|
+      merged << clone_something(value)
+    end
+  end
+
+  def merge_hash(hash, merged)
+    hash.each do |key, value|
+      if value.is_a?(Array)
+        merged[key] = []
+        merge_array(value, merged[key])
+      elsif value.is_a?(Hash)
+        merged[key] ||= {}
+        merge_hash(value, merged[key])
+      else
+        merged[key] = clone_something(value)
+      end
+    end
+  end
+end
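
A minimal usage sketch of the new DeepMerger service (illustrative, not part of the diff; execute! is the ServicePattern entry point used elsewhere in this release and returns the value passed to succeed!):

# Hypothetical example - nested hashes are merged recursively, later hashes
# winning on scalar keys.
defaults = {rack: {get: {"page" => "1"}}, user: {id: 1}}
extra = {rack: {post: {"name" => "test"}}, user: {admin: true}}

PeakFlowUtils::DeepMerger.execute!(hashes: [defaults, extra])
# => {rack: {get: {"page" => "1"}, post: {"name" => "test"}}, user: {id: 1, admin: true}}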

data/app/services/peak_flow_utils/handlers_finder_service.rb
CHANGED
@@ -1,8 +1,8 @@
 class PeakFlowUtils::HandlersFinderService < PeakFlowUtils::ApplicationService
-  def
+  def perform
     handlers = []
 
-    Dir.foreach("#{
+    Dir.foreach("#{__dir__}/../../handlers/peak_flow_utils") do |file|
       match = file.match(/\A(.+)_handler\.rb\Z/)
       next unless match
 

data/app/services/peak_flow_utils/translations_parser_service.rb
CHANGED
@@ -1,7 +1,7 @@
 class PeakFlowUtils::TranslationsParserService < PeakFlowUtils::ApplicationService
   attr_reader :db
 
-  def
+  def perform
     PeakFlowUtils::DatabaseInitializerService.execute!
 
     cache_translations_in_dir(Rails.root.join("config/locales"))
@@ -103,11 +103,11 @@ private
     puts message.to_s if @debug # rubocop:disable Rails/Output
   end
 
-  def
+  def perform_migrations
     require "baza_migrations"
 
     executor = BazaMigrations::MigrationsExecutor.new(db: @db)
-    executor.add_dir "#{
+    executor.add_dir "#{__dir__}/../../db/baza_translations_migrations"
     executor.execute_migrations
   end
 

data/lib/peak_flow_utils/inherited_local_var.rb
ADDED
@@ -0,0 +1,84 @@
+require "monitor"
+require_relative "thread_callbacks_patch"
+
+Thread.on_initialize do |parent:, thread:|
+  thread.instance_variable_set(:@_inherited_local_vars, parent.instance_variable_get(:@_inherited_local_vars))
+end
+
+Thread.class_eval do
+  def self.inherited_local_vars_mutex
+    @inherited_local_vars_mutex ||= Mutex.new
+  end
+
+  def self._inherited_local_vars
+    Thread.current.instance_variable_set(:@_inherited_local_vars, {}) unless Thread.current.instance_variable_get(:@_inherited_local_vars)
+    Thread.current.instance_variable_get(:@_inherited_local_vars)
+  end
+
+  def self.inherited_local_vars_reset
+    ObjectSpace.each_object(Thread) do |thread|
+      inherited_local_vars_mutex.synchronize do
+        thread.instance_variable_set(:@_inherited_local_vars, nil)
+      end
+    end
+  end
+
+  def self.inherited_local_vars_delete(key)
+    inherited_local_vars_mutex.synchronize do
+      raise "Key didn't exist: #{key}" unless _inherited_local_vars.key?(key)
+
+      _inherited_local_vars.delete(key)
+    end
+  rescue ThreadError # This can happen when process is closing down
+    _inherited_local_vars.delete(key)
+  end
+
+  def self.inherited_local_vars_fetch(key)
+    inherited_local_vars_mutex.synchronize do
+      return _inherited_local_vars.fetch(key)
+    end
+  end
+
+  def self.inherited_local_vars_get(key)
+    inherited_local_vars_mutex.synchronize do
+      return _inherited_local_vars[key]
+    end
+  end
+
+  def self.inherited_local_vars_set(values)
+    inherited_local_vars_mutex.synchronize do
+      current_vars = _inherited_local_vars
+      new_vars = PeakFlowUtils::DeepMerger.execute!(hashes: [current_vars, values])
+      Thread.current.instance_variable_set(:@_inherited_local_vars, new_vars)
+    end
+  end
+end
+
+class PeakFlowUtils::InheritedLocalVar
+  attr_reader :identifier
+
+  def self.finalize(inherited_local_var_object_id)
+    Thread.inherited_local_vars_delete("inherited_local_var_#{inherited_local_var_object_id}")
+  rescue Exception => e # rubocop:disable Lint/RescueException
+    puts e.inspect # rubocop:disable Rails/Output
+    puts e.backtrace # rubocop:disable Rails/Output
+
+    raise e
+  end
+
+  def initialize(new_value = nil)
+    ObjectSpace.define_finalizer(self, PeakFlowUtils::InheritedLocalVar.method(:finalize))
+
+    @identifier = "inherited_local_var_#{__id__}"
+
+    Thread.inherited_local_vars_set(identifier => new_value)
+  end
+
+  def value
+    Thread.inherited_local_vars_fetch(identifier)
+  end
+
+  def value=(new_value)
+    Thread.inherited_local_vars_set(identifier => new_value)
+  end
+end
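
A short sketch of how the new InheritedLocalVar behaves (illustrative, not part of the diff):

# Hypothetical example - the value is visible in threads spawned afterwards,
# because Thread.on_initialize copies the spawning thread's vars to the child.
request_id = PeakFlowUtils::InheritedLocalVar.new("req-123")

Thread.new do
  request_id.value # => "req-123" (inherited from the spawning thread)
end.join

request_id.value = "req-456" # replaces the value stored under this identifier for the current thread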

data/lib/peak_flow_utils/migrations/{20150907090900_create_handlers.rb → 20150907070908_create_handlers.rb}
File without changes

data/lib/peak_flow_utils/notifier.rb
CHANGED
@@ -2,37 +2,88 @@ class PeakFlowUtils::Notifier
   class FailedToReportError < RuntimeError; end
   class NotConfiguredError < RuntimeError; end
 
-  attr_reader :auth_token
+  attr_reader :auth_token, :mutex, :parameters
 
   def self.configure(auth_token:)
     @current = PeakFlowUtils::Notifier.new(auth_token: auth_token)
   end
 
-  def self.current
-    raise PeakFlowUtils::Notifier::NotConfiguredError, "Hasn't been configured" if !@current && Rails.env.test?
-
+  def self.current # rubocop:disable Style/TrivialAccessors
     @current
   end
 
-  def self.notify(*args)
-    PeakFlowUtils::Notifier.current
+  def self.notify(*args, **opts, &blk)
+    PeakFlowUtils::Notifier.current&.notify(*args, **opts, &blk)
+  end
+
+  def self.reset_parameters
+    ::PeakFlowUtils::Notifier.current&.instance_variable_set(:@parameters, ::PeakFlowUtils::InheritedLocalVar.new({}))
+  end
+
+  def self.with_parameters(parameters)
+    return yield unless ::PeakFlowUtils::Notifier.current
+
+    random_id = ::SecureRandom.hex(16)
+
+    ::PeakFlowUtils::Notifier.current.mutex.synchronize do
+      raise "'parameters' was nil?" if ::PeakFlowUtils::Notifier.current.parameters.value.nil?
+
+      parameters_with = ::PeakFlowUtils::Notifier.current.parameters.value.clone
+      parameters_with[random_id] = parameters
+
+      ::PeakFlowUtils::Notifier.current.parameters.value = parameters_with
+    end
+
+    begin
+      yield
+    ensure
+      ::PeakFlowUtils::Notifier.current.mutex.synchronize do
+        parameters_without = ::PeakFlowUtils::Notifier.current.parameters.value.clone
+        parameters_without.delete(random_id)
+
+        ::PeakFlowUtils::Notifier.current.parameters.value = parameters_without
+      end
+    end
   end
 
   def initialize(auth_token:)
     @auth_token = auth_token
+    @mutex = ::Mutex.new
+    @parameters = ::PeakFlowUtils::InheritedLocalVar.new({})
+  end
+
+  def current_parameters(parameters: nil)
+    hashes = current_parameters_hashes
+    hashes << parameters if parameters
+
+    ::PeakFlowUtils::DeepMerger.execute!(hashes: hashes)
+  end
+
+  def current_parameters_hashes
+    parameters.value.values
+  end
+
+  def error_message_from_response(response)
+    message = "Couldn't report error to Peakflow (code #{response.code})"
+
+    if response["content-type"]&.starts_with?("application/json")
+      response_data = ::JSON.parse(response.body)
+      message << ": #{response_data.fetch("errors").join(". ")}" if response_data["errors"]
+    end
+
+    message
   end
 
   def notify(error:, environment: nil, parameters: nil)
-    error_parser = PeakFlowUtils::NotifierErrorParser.new(
+    error_parser = ::PeakFlowUtils::NotifierErrorParser.new(
       backtrace: error.backtrace,
       environment: environment,
       error: error
     )
 
-
+    merged_parameters = current_parameters(parameters: parameters)
 
-
-    https.use_ssl = true
+    uri = URI("https://www.peakflow.io/errors/reports")
 
     data = {
       auth_token: auth_token,
@@ -43,25 +94,32 @@ class PeakFlowUtils::Notifier
         file_path: error_parser.file_path,
         line_number: error_parser.line_number,
         message: error.message,
-        parameters:
+        parameters: merged_parameters,
         remote_ip: error_parser.remote_ip,
         url: error_parser.url,
         user_agent: error_parser.user_agent
       }
     }
 
-
+    send_notify_request(data: data, uri: uri)
+  end
+
+  def send_notify_request(data:, uri:)
+    https = ::Net::HTTP.new(uri.host, uri.port)
+    https.use_ssl = true
+
+    request = ::Net::HTTP::Post.new(uri.path)
     request["Content-Type"] = "application/json"
-    request.body = JSON.generate(data)
+    request.body = ::JSON.generate(data)
 
     response = https.request(request)
 
     raise FailedToReportError, error_message_from_response(response) unless response.code == "200"
 
-    response_data = JSON.parse(response.body)
+    response_data = ::JSON.parse(response.body)
 
     # Data not always present so dont use fetch
-    PeakFlowUtils::NotifierResponse.new(
+    ::PeakFlowUtils::NotifierResponse.new(
       bug_report_id: response_data["bug_report_id"],
       bug_report_instance_id: response_data["bug_report_instance_id"],
       project_id: response_data["project_id"],
@@ -69,15 +127,4 @@ class PeakFlowUtils::Notifier
       url: response_data["url"]
     )
   end
-
-  def error_message_from_response(response)
-    message = "Couldn't report error to Peakflow (code #{response.code})"
-
-    if response["content-type"]&.starts_with?("application/json")
-      response_data = JSON.parse(response.body)
-      message << ": #{response_data.fetch("errors").join(". ")}" if response_data["errors"]
-    end
-
-    message
-  end
 end
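
A hedged usage sketch of the reworked notifier API (illustrative, not part of the diff; the auth token value is a placeholder):

PeakFlowUtils::Notifier.configure(auth_token: "your-peakflow-auth-token")

# Parameters passed to with_parameters are deep-merged into any report sent
# inside the block - also from threads spawned within it, since they are kept
# in an InheritedLocalVar.
PeakFlowUtils::Notifier.with_parameters(job: {name: "NightlyImport"}) do
  begin
    raise "Something broke"
  rescue => e
    PeakFlowUtils::Notifier.notify(error: e, parameters: {batch: 42})
  end
end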

data/lib/peak_flow_utils/notifier_rack.rb
CHANGED
@@ -9,13 +9,13 @@ class PeakFlowUtils::NotifierRack
   rescue Exception => e # rubocop:disable Lint/RescueException
     controller = env["action_controller.instance"]
     request = controller&.request
-    parameters = {}.merge(request.GET).merge(request.POST)
 
-    PeakFlowUtils::Notifier.
-
-
-
-
+    PeakFlowUtils::Notifier.with_parameters(rack: {get: request.GET, post: request.POST}) do
+      PeakFlowUtils::Notifier.notify(
+        environment: env,
+        error: e
+      )
+    end
 
     raise e
   end

data/lib/peak_flow_utils/thread_callbacks_patch.rb
ADDED
@@ -0,0 +1,23 @@
+class Thread
+  alias_method :_initialize, :initialize # rubocop:disable Style/Alias
+
+  def self.on_initialize(&callback)
+    @@on_initialize_count = 0 if @on_initialize_count.nil? # rubocop:disable Style/ClassVars
+    count_to_use = @@on_initialize_count
+    @@on_initialize_count += 1 # rubocop:disable Style/ClassVars
+
+    @@on_initialize_callbacks ||= {} # rubocop:disable Style/ClassVars
+    @@on_initialize_callbacks[count_to_use] = callback
+
+    count_to_use
+  end
+
+  def initialize(*args, &block)
+    @@on_initialize_callbacks ||= {} # rubocop:disable Style/ClassVars
+    @@on_initialize_callbacks.each_value do |callback|
+      callback.call(parent: Thread.current, thread: self)
+    end
+
+    _initialize(*args, &block)
+  end
+end
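
A minimal sketch of the Thread patch in use (illustrative, not part of the diff):

# Hypothetical example - callbacks registered with Thread.on_initialize run in
# the spawning thread just before the new thread's body starts.
Thread.on_initialize do |parent:, thread:|
  puts "#{parent.inspect} is spawning #{thread.inspect}"
end

Thread.new { :work }.join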
data/lib/peak_flow_utils.rb
CHANGED
@@ -4,16 +4,19 @@ require "array_enumerator"
 require "service_pattern"
 
 module PeakFlowUtils
-  path = "#{
-  models_path = "#{
+  path = "#{__dir__}/peak_flow_utils"
+  models_path = "#{__dir__}/peak_flow_utils/models"
+  services_path = File.realpath("#{__dir__}/../app/services/peak_flow_utils")
 
+  autoload :ApplicationService, "#{services_path}/application_service"
+  autoload :DeepMerger, "#{services_path}/deep_merger"
+  autoload :InheritedLocalVar, "#{path}/inherited_local_var"
   autoload :Notifier, "#{path}/notifier"
   autoload :NotifierErrorParser, "#{path}/notifier_error_parser"
   autoload :NotifierRack, "#{path}/notifier_rack"
   autoload :NotifierRails, "#{path}/notifier_rails"
   autoload :NotifierResponse, "#{path}/notifier_response"
   autoload :NotifierSidekiq, "#{path}/notifier_sidekiq"
-  autoload :RspecHelper, "#{path}/rspec_helper"
   autoload :HandlerHelper, "#{path}/handler_helper"
 
   autoload :ApplicationRecord, "#{models_path}/application_record"
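
With these autoload entries in place, the new constants resolve lazily once the gem is required (a sketch, not part of the diff):

require "peak_flow_utils"

PeakFlowUtils::DeepMerger        # autoloaded from app/services/peak_flow_utils/deep_merger.rb
PeakFlowUtils::InheritedLocalVar # autoloaded from lib/peak_flow_utils/inherited_local_var.rb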
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: peak_flow_utils
 version: !ruby/object:Gem::Version
-  version: 0.1.
+  version: 0.1.16
 platform: ruby
 authors:
 - kaspernj
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2022-02-22 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rails
@@ -58,9 +58,37 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version:
+        version: 1.0.5
   type: :runtime
   prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: 1.0.5
+- !ruby/object:Gem::Dependency
+  name: appraisal
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: pry-rails
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -111,8 +139,7 @@ dependencies:
 description: Utilities to be used with PeakFlow.
 email:
 - kaspernj@gmail.com
-executables:
-- peak_flow_rspec_files
+executables: []
 extensions: []
 extra_rdoc_files: []
 files:
@@ -135,6 +162,7 @@ files:
 - app/services/peak_flow_utils/attribute_service.rb
 - app/services/peak_flow_utils/configuration_service.rb
 - app/services/peak_flow_utils/database_initializer_service.rb
+- app/services/peak_flow_utils/deep_merger.rb
 - app/services/peak_flow_utils/erb_inspector.rb
 - app/services/peak_flow_utils/erb_inspector/file_inspector.rb
 - app/services/peak_flow_utils/erb_inspector/translation_inspector.rb
@@ -143,14 +171,14 @@ files:
 - app/services/peak_flow_utils/model_inspector.rb
 - app/services/peak_flow_utils/translation_service.rb
 - app/services/peak_flow_utils/translations_parser_service.rb
-- bin/peak_flow_rspec_files
 - config/routes.rb
 - lib/peak_flow_utils.rb
 - lib/peak_flow_utils/engine.rb
 - lib/peak_flow_utils/handler_helper.rb
+- lib/peak_flow_utils/inherited_local_var.rb
 - lib/peak_flow_utils/migrations/20150902155200_create_translation_keys.rb
+- lib/peak_flow_utils/migrations/20150907070908_create_handlers.rb
 - lib/peak_flow_utils/migrations/20150907070909_create_groups.rb
-- lib/peak_flow_utils/migrations/20150907090900_create_handlers.rb
 - lib/peak_flow_utils/migrations/20150908085500_create_translation_values.rb
 - lib/peak_flow_utils/migrations/20150908090800_create_handler_texts.rb
 - lib/peak_flow_utils/migrations/20160411190500_create_scanned_files.rb
@@ -167,7 +195,7 @@ files:
 - lib/peak_flow_utils/notifier_rails.rb
 - lib/peak_flow_utils/notifier_response.rb
 - lib/peak_flow_utils/notifier_sidekiq.rb
-- lib/peak_flow_utils/
+- lib/peak_flow_utils/thread_callbacks_patch.rb
 - lib/peak_flow_utils/version.rb
 - lib/tasks/peak_flow_utils_tasks.rake
 homepage: https://github.com/kaspernj/peak_flow_utils
@@ -189,7 +217,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.2.32
 signing_key:
 specification_version: 4
 summary: Utilities to be used with PeakFlow.
data/bin/peak_flow_rspec_files
DELETED
@@ -1,21 +0,0 @@
-#!/usr/bin/env ruby
-
-# This task detects and prints out the RSpec files for the current build group
-
-require "#{File.dirname(__FILE__)}/../lib/peak_flow_utils"
-
-args = {}
-ARGV.each do |arg|
-  if (match = arg.match(/\A--(.+?)=(.+)\Z/))
-    args[match[1]] = match[2]
-  end
-end
-
-rspec_helper = PeakFlowUtils::RspecHelper.new(
-  groups: args.fetch("groups").to_i,
-  group_number: args.fetch("group-number").to_i,
-  only_types: args["only-types"]&.split(","),
-  tags: args["tags"]&.split(",")
-)
-
-print rspec_helper.group_files.map { |group_file| group_file.fetch(:path) }.join(" ")

data/lib/peak_flow_utils/rspec_helper.rb
DELETED
@@ -1,195 +0,0 @@
-class PeakFlowUtils::RspecHelper
-  attr_reader :only_types, :tags
-
-  def initialize(groups:, group_number:, only_types: nil, tags: nil)
-    @groups = groups
-    @group_number = group_number
-    @example_data_exists = File.exist?("spec/examples.txt")
-    @only_types = only_types
-    @tags = tags
-  end
-
-  def example_data_exists?
-    @example_data_exists
-  end
-
-  def example_data
-    @example_data ||= begin
-      raw_data = File.read("spec/examples.txt")
-
-      result = []
-      raw_data.scan(/^\.\/(.+)\[(.+?)\]\s+\|\s+(.+?)\s+\|\s+((.+?) seconds|)\s+\|$/) do |match|
-        file_path = match[0]
-        spec_result = match[1]
-        seconds = match[4]&.to_f
-
-        spec_data = {
-          file_path: file_path,
-          spec_result: spec_result,
-          seconds: seconds
-        }
-
-        result << spec_data
-      end
-
-      result
-    end
-  end
-
-  def example_files
-    @example_files ||= begin
-      files = {}
-      example_data.each do |spec_data|
-        file_path = spec_data.fetch(:file_path)
-        seconds = spec_data.fetch(:seconds)
-
-        files[file_path] ||= {examples: 0, seconds: 0.0}
-        files[file_path][:examples] += 1
-        files[file_path][:seconds] += seconds if seconds
-      end
-
-      files
-    end
-  end
-
-  def example_file(path)
-    example_files[path]
-  end
-
-  def group_files
-    return @group_files if @group_files
-
-    sorted_files.each do |file|
-      file_path = file.fetch(:path)
-      file_data = example_file(file_path) if example_data_exists?
-
-      if file_data
-        examples = file_data.fetch(:examples)
-        seconds = file_data.fetch(:seconds)
-      else
-        examples = file.fetch(:examples)
-      end
-
-      group = group_with_least
-      group[:examples] += examples
-      group[:files] << file
-      group[:seconds] += seconds if seconds
-    end
-
-    @group_files = group_orders[@group_number - 1].fetch(:files)
-  end
-
-  def group_orders
-    @group_orders ||= begin
-      group_orders = []
-      @groups.times do
-        group_orders << {
-          examples: 0,
-          files: [],
-          seconds: 0.0
-        }
-      end
-      group_orders
-    end
-  end
-
-  def group_with_least
-    group_orders.min do |group1, group2|
-      if example_data_exists? && group1.fetch(:seconds) != 0.0 && group2.fetch(:seconds) != 0.0
-        group1.fetch(:seconds) <=> group2.fetch(:seconds)
-      else
-        group1.fetch(:examples) <=> group2.fetch(:examples)
-      end
-    end
-  end
-
-  # Sort them so that they are sorted by file path in three groups so each group have an equal amount of controller specs, features specs and so on
-  def sorted_files
-    files.values.sort do |file1, file2|
-      file1_path = file1.fetch(:path)
-      file2_path = file2.fetch(:path)
-
-      file1_data = example_file(file1_path) if example_data_exists?
-      file2_data = example_file(file2_path) if example_data_exists?
-
-      if file1_data && file2_data && file1_data.fetch(:seconds) != 0.0 && file2_data.fetch(:seconds) != 0.0
-        value1 = file1_data[:seconds]
-      else
-        value1 = file1.fetch(:points)
-      end
-
-      if file2_data && file1_data && file2_data.fetch(:seconds) != 0.0 && file2_data.fetch(:seconds) != 0.0
-        value2 = file2_data[:seconds]
-      else
-        value2 = file2.fetch(:points)
-      end
-
-      if value1 == value2
-        value2 = file1_path
-        value1 = file2_path
-      end
-
-      value2 <=> value1
-    end
-  end
-
-private
-
-  def dry_result_command
-    command = "bundle exec rspec --dry-run --format json"
-
-    tags&.each do |tag|
-      command << " --tag #{tag}"
-    end
-
-    command
-  end
-
-  def dry_result
-    require "json"
-    @dry_result ||= ::JSON.parse(`#{dry_result_command}`)
-  end
-
-  def dry_file(path)
-    files.fetch(path)
-  end
-
-  def files
-    @files ||= begin
-      result = {}
-      dry_result.fetch("examples").each do |example|
-        file_path = example.fetch("file_path")
-        file_path = file_path[2, file_path.length]
-        type = type_from_path(file_path)
-        points = points_from_type(type)
-
-        next if ignore_type?(type)
-
-        result[file_path] = {examples: 0, path: file_path, points: 0, type: type} unless result.key?(file_path)
-        result[file_path][:examples] += 1
-        result[file_path][:points] += points
-      end
-
-      result
-    end
-  end
-
-  def ignore_type?(type)
-    only_types && !only_types.include?(type) # rubocop:disable Rails/NegateInclude:, Style/SafeNavigation
-  end
-
-  def type_from_path(file_path)
-    match = file_path.match(/^spec\/(.+?)\//)
-    match[1] if match
-  end
-
-  def points_from_type(type)
-    if type == "feature" || type == "system"
-      10
-    elsif type == "controllers"
-      3
-    else
-      1
-    end
-  end
-end