ossert 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +16 -0
- data/.rspec +2 -0
- data/.rubocop_todo.yml +44 -0
- data/.ruby-gemset +1 -0
- data/.ruby-version +1 -0
- data/.travis.yml +16 -0
- data/Gemfile +8 -0
- data/LICENSE.txt +21 -0
- data/README.md +199 -0
- data/Rakefile +12 -0
- data/bin/console +14 -0
- data/bin/setup +8 -0
- data/config/classifiers.yml +153 -0
- data/config/descriptions.yml +45 -0
- data/config/sidekiq.rb +15 -0
- data/config/stats.yml +198 -0
- data/config/translations.yml +44 -0
- data/db/backups/.keep +0 -0
- data/db/migrate/001_create_projects.rb +22 -0
- data/db/migrate/002_create_exceptions.rb +14 -0
- data/db/migrate/003_add_meta_to_projects.rb +14 -0
- data/db/migrate/004_add_timestamps_to_projects.rb +12 -0
- data/db/migrate/005_create_classifiers.rb +19 -0
- data/lib/ossert/classifiers/decision_tree.rb +112 -0
- data/lib/ossert/classifiers/growing/check.rb +172 -0
- data/lib/ossert/classifiers/growing/classifier.rb +175 -0
- data/lib/ossert/classifiers/growing.rb +163 -0
- data/lib/ossert/classifiers.rb +14 -0
- data/lib/ossert/config.rb +24 -0
- data/lib/ossert/fetch/bestgems.rb +98 -0
- data/lib/ossert/fetch/github.rb +536 -0
- data/lib/ossert/fetch/rubygems.rb +80 -0
- data/lib/ossert/fetch.rb +142 -0
- data/lib/ossert/presenters/project.rb +202 -0
- data/lib/ossert/presenters/project_v2.rb +117 -0
- data/lib/ossert/presenters.rb +8 -0
- data/lib/ossert/project.rb +144 -0
- data/lib/ossert/quarters_store.rb +164 -0
- data/lib/ossert/rake_tasks.rb +6 -0
- data/lib/ossert/reference.rb +87 -0
- data/lib/ossert/repositories.rb +138 -0
- data/lib/ossert/saveable.rb +153 -0
- data/lib/ossert/stats/agility_quarter.rb +62 -0
- data/lib/ossert/stats/agility_total.rb +71 -0
- data/lib/ossert/stats/base.rb +113 -0
- data/lib/ossert/stats/community_quarter.rb +28 -0
- data/lib/ossert/stats/community_total.rb +24 -0
- data/lib/ossert/stats.rb +32 -0
- data/lib/ossert/tasks/database.rake +179 -0
- data/lib/ossert/tasks/ossert.rake +52 -0
- data/lib/ossert/version.rb +4 -0
- data/lib/ossert/workers/fetch.rb +21 -0
- data/lib/ossert/workers/fetch_bestgems_page.rb +32 -0
- data/lib/ossert/workers/refresh_fetch.rb +22 -0
- data/lib/ossert/workers/sync_rubygems.rb +0 -0
- data/lib/ossert/workers.rb +11 -0
- data/lib/ossert.rb +63 -0
- data/ossert.gemspec +47 -0
- metadata +396 -0
data/lib/ossert/quarters_store.rb
@@ -0,0 +1,164 @@
# frozen_string_literal: true
module Ossert
  # Public: Class for data divided by quarters. Each quarter instantiates some statistics class.
  # Contains methods for quarters calculations, such as grouping, preview and other.
  class QuartersStore
    attr_reader :quarters, :data_klass, :start_date, :end_date

    # Public: Instantiate QuartersStore
    #
    # data_klass - the Object for quarter data storage; to be compatible it
    #              should implement:
    #                - class method #metrics, returning an Array of metric names;
    #                - instance method #metric_values, returning values of metrics
    #                  in the same order.
    #
    # Returns nothing.
    def initialize(data_klass_name)
      @data_klass_name = data_klass_name
      @quarters = {}
      @start_date = Time.now
      @end_date = Time.now
    end

    def data_klass
      @data_klass ||= Kernel.const_get(@data_klass_name)
    end

    # Public: Strict fetch of the quarter for a given date.
    #
    # date - the String, Numeric or DateTime to seek the beginning of quarter for.
    #
    # Returns the quarter Object, or raises KeyError.
    def fetch(date)
      quarters.fetch date_to_start(date)
    end

    # Public: Find or create the quarter for a given date.
    #
    # date - the String, Numeric or DateTime to seek the beginning of quarter for.
    #
    # Returns the quarter Object.
    def find_or_create(date)
      quarters[date_to_start(date)] ||= data_klass.new
    end
    alias [] find_or_create

    # Public: Find the closest beginning of quarter for a given date.
    #
    # date - the String, Numeric or DateTime to seek the beginning of quarter for.
    #
    # Returns the beginning of quarter as a UNIX timestamp.
    def date_to_start(date)
      if date.is_a? String
        # Alternative, but more expensive: DateTime.parse(value).beginning_of_quarter.to_i
        DateTime.new(*date.split('-').map(&:to_i)).beginning_of_quarter.to_i
      else
        Time.at(date).to_date.to_time(:utc).beginning_of_quarter.to_i
      end
    end

    # Public: Prepare quarters for preview.
    #
    # Returns a sorted Hash of quarter start dates and their data.
    def preview
      quarters.sort.map { |unix_timestamp, quarter| [Time.at(unix_timestamp), quarter] }.to_h
    end

    # Public: Get quarter metric values aggregated for the last year.
    #
    # offset - the Numeric (default: 1) offset, in quarters, of when the "last year" should end.
    #
    # Returns an Array of quarter metric values aggregated for the last year.
    def last_year_data(offset = 1)
      last_year_as_hash(offset).values
    end

    # Public: Get quarter metric values aggregated for the last year.
    #
    # offset - the Numeric (default: 1) offset, in quarters, of when the "last year" should end.
    #
    # Returns a Hash of quarter metrics and their values aggregated for the last year.
    def last_year_as_hash(offset = 1)
      data_klass.metrics.zip(aggregated_quarter(offset).metric_values).to_h
    end

    # Public: Generate the aggregated quarter object for the last year.
    #
    # offset - the Numeric (default: 1) offset, in quarters, of when the "last year" should end.
    #
    # Returns a quarter Object with attributes aggregated for the last year.
    def aggregated_quarter(offset = 1)
      last_quarters = quarters.sort.last(4 + offset).take(4)
      last_quarters.inject(data_klass.new) do |acc, (_, quarter)|
        acc << quarter
      end
    end

    # Public: Fill quarter bounds and holes in the period from the first to the last quarter.
    # It will assign @start_date and @end_date of the QuartersStore instance.
    # Should be called after all data is gathered and we are ready for data presentation.
    #
    # Returns nothing.
    def fullfill!
      return if quarters.empty?

      periods_range = with_quarters_dates do |period|
        find_or_create Time.at(period)
      end

      @start_date = Time.at(periods_range.first)
      @end_date = Time.at(periods_range.last)
    end

    # Public: Iterate (and yield) through quarter dates in ascending order.
    #
    # Yields the Numeric UNIX timestamp inside each quarter.
    #
    # Returns a Range of quarter dates.
    def with_quarters_dates
      sorted_quarters = quarters.keys.sort
      (sorted_quarters.first..sorted_quarters.last).step(93.days) { |period| yield(period) }
    end

    # Public: Iterate (and yield) through quarters in descending order.
    #
    # Yields the Numeric UNIX timestamp of the beginning of each quarter
    #        and the Object holding that quarter's data.
    #
    # Returns an Array of sorted pairs of time and quarter object.
    def reverse_each_sorted
      quarters.sort.reverse.map { |time, quarter| yield(time, quarter) }
    end

    # Public: Iterate (and yield) through quarters in ascending order.
    #
    # Yields the Numeric UNIX timestamp of the beginning of each quarter
    #        and the Object holding that quarter's data.
    #
    # Returns an Array of sorted pairs of time and quarter object.
    def each_sorted
      quarters.sort.map { |time, quarter| yield(time, quarter) }
    end

    # Public: Generate a Hash for the current data structure.
    # Keys are UNIX timestamps (beginning of each quarter),
    # values are quarter objects explicitly converted to Hash.
    #
    # Returns a Hash.
    def to_hash
      quarters.each_with_object({}) do |(time, quarter), result|
        result[time] = quarter.to_hash
      end
    end

    # Public: Generate JSON for the current data structure.
    # Keys are UNIX timestamps (beginning of each quarter),
    # values are quarter objects explicitly converted to Hash.
    #
    # Returns a String containing valid JSON.
    def to_json
      MultiJson.dump(self)
    end
  end
end
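For orientation, here is a minimal usage sketch of the QuartersStore API above. It is not part of the released files, and it assumes a compatible stats class from this gem (Ossert::Stats::AgilityQuarter, shown later in this diff) plus the ActiveSupport core extensions and MultiJson the gem already depends on.

# Hypothetical sketch, not shipped with the gem.
require 'ossert'

store = Ossert::QuartersStore.new('Ossert::Stats::AgilityQuarter')

# `[]` is an alias for #find_or_create: any String, Numeric or Time is
# resolved to the beginning of its quarter (a UNIX timestamp key).
store['2016-05-14']            # quarter starting 2016-04-01
store[Time.utc(2016, 11, 3)]   # quarter starting 2016-10-01

store.fullfill!                # fill holes between the first and last quarter
store.preview                  # => { Time(quarter start) => stats object, ... }
store.last_year_as_hash        # => { metric name => aggregated value, ... }
store.to_json                  # => JSON string keyed by quarter timestamps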
data/lib/ossert/reference.rb
@@ -0,0 +1,87 @@
# frozen_string_literal: true
module Ossert
  module Reference
    def prepare_projects!
      references = %w(A B C D E).map { |e| Kernel.const_get("Ossert::Reference::Class#{e}").new }
      references.each(&:prepare_projects!)
      references
    end
    module_function :prepare_projects!

    def process_references(references)
      require './config/sidekiq'
      Array(references).each do |reference|
        reference.project_names.each_with_object(reference.class.name.demodulize) do |project_name, klass|
          if Ossert::Project.exist?(project_name)
            project = Ossert::Project.load_by_name(project_name)
            project.reference = klass
            project.dump
          else
            Ossert::Workers::Fetch.perform_async(project_name, klass)
          end
        end
      end
    end
    module_function :process_references

    class Base
      CLASSES = %w(ClassA ClassB ClassC ClassD ClassE).freeze

      attr_reader :representative, :pages, :project_names

      def initialize(representative, pages)
        @representative = representative
        @pages = pages
        @project_names = Set.new
        # 20 each page, total 5907 pages
      end

      PER_PAGE = 20

      def prepare_projects!
        puts "Processing #{self.class.name}"
        all_pages = pages.to_a.shuffle
        all_projects = {}
        (representative / PER_PAGE).times do
          current_page = all_pages.pop
          Fetch::BestgemsDailyStat.process_page(current_page) do |rank, downloads, name|
            all_projects[name] = { rank: rank, downloads: downloads }
          end
        end

        # @project_names.merge all_projects.keys.shuffle.first(representative)
        @project_names.merge all_projects.sort_by { |_, info| info[:downloads] }.to_h.keys.last(representative)
      end
    end

    class ClassA < Base
      def initialize
        super(50, 1..10)
      end
    end

    class ClassB < Base
      def initialize
        super(50, 11..100)
      end
    end

    class ClassC < Base
      def initialize
        super(50, 101..250)
      end
    end

    class ClassD < Base
      def initialize
        super(50, 251..500)
      end
    end

    class ClassE < Base
      def initialize
        super(50, 501..2500)
      end
    end
  end
end
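The two module functions above appear to be the entry points for building the reference set of gems. A hedged driver sketch, assuming a configured database and the Sidekiq setup loaded by './config/sidekiq' (not part of the released files):

# Hypothetical sketch, not shipped with the gem.
require 'ossert'

references = Ossert::Reference.prepare_projects!   # samples 50 gems for each of ClassA..ClassE
Ossert::Reference.process_references(references)   # dumps already-known projects, enqueues Fetch workers for the rest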
data/lib/ossert/repositories.rb
@@ -0,0 +1,138 @@
# frozen_string_literal: true
require 'oj'
require 'multi_json'

class NameException < Sequel::Model(:exceptions)
  set_primary_key [:name]
  class << self
    alias by_name []
  end
end
::NameException.unrestrict_primary_key

class Classifier < Sequel::Model
  set_primary_key [:section]
  def self.actual?
    where('updated_at > ?', 1.month.ago).count.positive?
  end
end
::Classifier.unrestrict_primary_key

class Project < Sequel::Model
  set_primary_key [:name]

  def_dataset_method(:random) do |count|
    where('random() < ?', count * 0.05).limit(count)
  end

  class << self
    def later_than(id)
      where('id >= ?', id)
    end

    def referenced
      where('reference <> ?', Ossert::Saveable::UNUSED_REFERENCE)
    end
  end

  class Unpacker
    def initialize(stored_project)
      @stored_project = stored_project
    end

    def self.process(stored_project)
      new(stored_project).process
    end

    def process
      [:agility, :community].each_with_object(process_meta) do |stats_type, result|
        result[stats_type] = factory_project_stats(stats_type).new(
          [Total, Quarter].each_with_object({}) do |unpacker_type, stats_result|
            section_unpacker = unpacker_type.new(@stored_project, stats_type)
            stats_result[section_unpacker.section] = section_unpacker.process
          end
        )
      end
    ensure
      @stored_project = nil
    end

    private

    def process_meta(result = {})
      result = {
        created_at: @stored_project.created_at,
        updated_at: @stored_project.updated_at
      }
      result[:meta] = if @stored_project.meta_data.present?
                        MultiJson.load(@stored_project.meta_data)
                      else
                        {}
                      end

      result
    end

    def factory_project_stats(stats_type)
      Kernel.const_get "Ossert::Project::#{stats_type.to_s.capitalize}"
    end

    class Base
      def initialize(stored_project, stats_type)
        @stats_type = stats_type
        @stored_project = stored_project
      end

      def coerce_value(value)
        return DateTime.parse(value)
      rescue
        value
      end

      def stored_data
        @stored_project.send("#{@stats_type}_#{section}_data")
      end
    end

    class Total < Base
      def section
        :total
      end

      def new_stats_object
        Kernel.const_get("Ossert::Stats::#{@stats_type.capitalize}Total").new
      end

      def process
        MultiJson.load(
          stored_data
        ).each_with_object(new_stats_object) do |(metric, value), stats_object|
          stats_object.send "#{metric}=", coerce_value(value)
        end
      end
    end

    class Quarter < Base
      def section
        :quarters
      end

      def new_stats_object
        Ossert::QuartersStore.new(
          "Ossert::Stats::#{@stats_type.capitalize}Quarter"
        )
      end

      def process
        MultiJson.load(
          stored_data
        ).each_with_object(new_stats_object) do |(time, metrics), quarter_store|
          metrics.each_with_object(quarter_store[time.to_i]) do |(metric, value), quarter|
            quarter.send "#{metric}=", coerce_value(value)
          end
        end
      end
    end
  end
end
::Project.unrestrict_primary_key
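Project::Unpacker is what Ossert::Saveable (next hunk) uses to turn a stored row back into in-memory stats objects. A rough sketch of the shape it returns, with an illustrative project name only:

# Hypothetical sketch, not shipped with the gem; 'rake' is only an example name.
stored = ::Project.find(name: 'rake')
data   = ::Project::Unpacker.process(stored)
data.keys       # => [:created_at, :updated_at, :meta, :agility, :community]
data[:agility]  # stats wrapper holding a :total stats object and a :quarters QuartersStore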
data/lib/ossert/saveable.rb
@@ -0,0 +1,153 @@
# frozen_string_literal: true
module Ossert
  module Saveable
    UNUSED_REFERENCE = 'unused'
    ATTRIBUTE_EXTRACT_VALUE_MAP = {
      agility_total_data: ->(project) { project.agility.total.to_json },
      agility_quarters_data: ->(project) { project.agility.quarters.to_json },
      community_total_data: ->(project) { project.community.total.to_json },
      community_quarters_data: ->(project) { project.community.quarters.to_json },
      meta_data: ->(project) { project.meta_to_json }
    }.freeze

    def self.included(base)
      base.extend(ClassMethods)
    end

    def dump_attribute(attribute)
      attribute = attribute.to_sym
      value = ATTRIBUTE_EXTRACT_VALUE_MAP.fetch(attribute).call(self)

      raise 'Not saved yet, sorry!' unless (found_project = ::Project.find(name: name))
      found_project.update(name, attribute => value, updated_at: Time.now.utc)
      nil
    end

    def dump
      validate!
      if (found_project = ::Project.find(name: name))
        found_project.update(attributes.merge(updated_at: Time.now.utc))
      else
        ::Project.create(attributes)
      end
      nil
    end

    def valid?
      [name, github_alias, rubygems_alias].all?(&:present?)
    end

    class RecordInvalid < StandardError
      attr_reader :message
      def initialize(*)
        super
        @message = "Couldn't save project. Validation failed!"
      end
    end

    def validate!
      raise RecordInvalid.new unless valid?
    end

    def attributes
      meta_attributes.merge(data_attributes)
    end

    def meta_attributes
      {
        name: name,
        github_name: github_alias,
        rubygems_name: rubygems_alias,
        reference: reference
      }
    end

    def data_attributes
      {
        meta_data: meta_to_json,
        agility_total_data: agility.total.to_json,
        agility_quarters_data: agility.quarters.to_json,
        community_total_data: community.total.to_json,
        community_quarters_data: community.quarters.to_json
      }
    end

    def without_github_data?
      github_alias == NO_GITHUB_NAME
    end

    module ClassMethods
      def exist?(name)
        ::Project.filter(name: name).get(:name).present?
      end

      def random_top(count = 10)
        ::Project.where(reference: %w(ClassA ClassB)).random(count)
      end

      def random(count = 10)
        ::Project.dataset.random(count)
      end

      def find_by_name(name, reference = Ossert::Saveable::UNUSED_REFERENCE)
        if (name_exception = ::NameException.find(name: name))
          new(name, name_exception.github_name, name, reference)
        else
          new(name, nil, name, reference)
        end
      end

      def load_by_name(name)
        stored_prj = ::Project.find(name: name)
        deserialize(stored_prj) if stored_prj
      end

      def load_referenced
        ::Project.referenced.map do |stored_prj|
          deserialize(stored_prj)
        end
      end

      def load_later_than(id)
        ::Project.later_than(id).map do |stored_prj|
          deserialize(stored_prj)
        end
      end

      def cleanup_referencies!
        ::Project.dataset.update(reference: UNUSED_REFERENCE)
      end

      def load_all
        ::Project.paged_each.map do |stored_prj|
          deserialize(stored_prj)
        end
      end

      def yield_all
        ::Project.paged_each do |stored_prj|
          yield deserialize(stored_prj)
        end
      end

      def dump
        projects.each(&:dump)
      end

      private

      def deserialize(stored_project)
        project = Ossert::Project.new(
          stored_project.name,
          stored_project.github_name,
          stored_project.rubygems_name,
          stored_project.reference
        )
        project.assign_data(
          ::Project::Unpacker.process(stored_project)
        )
        project
      end
    end
  end
end
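A hedged round-trip sketch of the persistence API above, assuming a migrated ossert database; 'multi_json' and 'ClassB' are only example values:

# Hypothetical sketch, not shipped with the gem.
if Ossert::Project.exist?('multi_json')
  project = Ossert::Project.load_by_name('multi_json')  # deserialize the stored row
  project.reference = 'ClassB'                          # relabel its reference class
  project.dump                                          # validate! and update the row
end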
data/lib/ossert/stats/agility_quarter.rb
@@ -0,0 +1,62 @@
# frozen_string_literal: true
module Ossert
  module Stats
    class AgilityQuarter < Base
      self.section = 'agility'
      self.section_type = 'quarter'
      create_attributes_accessors

      define_percent(
        issues_active: :issues_all,
        issues_closed: :issues_all,
        pr_active: :pr_all,
        pr_closed: :pr_all,
        pr_merged: :pr_all,
        default_value: 100.0
      )

      define_counts(
        :issues_active, :pr_active, :issues_closed, :issues_actual,
        :pr_closed, :issues_all, :pr_all, :pr_actual
      )

      def issues_processed_in_median
        median(issues_processed_in_days, default_value: PER_QUARTER_TOO_LONG)
      end

      def issues_processed_in_avg
        return PER_QUARTER_TOO_LONG if (count = Array(issues_processed_in_days).size).zero?
        issues_processed_in_days.sum / count
      end

      def pr_processed_in_median
        median(pr_processed_in_days, default_value: PER_QUARTER_TOO_LONG)
      end

      def pr_processed_in_avg
        return PER_QUARTER_TOO_LONG if (count = Array(pr_processed_in_days).size).zero?
        pr_processed_in_days.sum / count
      end

      def issues_active
        (issues_open | issues_actual) - issues_closed
      end

      def issues_all
        (issues_open | issues_closed | issues_actual)
      end

      def pr_active
        (pr_open | pr_actual) - pr_closed
      end

      def pr_all
        (pr_open | pr_closed | pr_actual)
      end

      def releases_count
        [releases_total_rg.count, releases_total_gh.count].max
      end
    end
  end
end
data/lib/ossert/stats/agility_total.rb
@@ -0,0 +1,71 @@
# frozen_string_literal: true
module Ossert
  module Stats
    class AgilityTotal < Base
      self.section = 'agility'
      self.section_type = 'total'
      create_attributes_accessors

      define_percent(
        issues_active: :issues_all,
        issues_closed: :issues_all,
        issues_non_owner: :issues_all,
        issues_with_contrib_comments: :issues_all,
        pr_active: :pr_all,
        pr_closed: :pr_all,
        pr_non_owner: :pr_all,
        pr_with_contrib_comments: :pr_all,
        default_value: 100.0
      )

      define_ints(
        :first_pr_date, :last_pr_date, :first_issue_date,
        :last_issue_date, :last_release_date
      )

      define_counts(:issues_all, :pr_all, :stale_branches, :dependencies)

      def commits_count_since_last_release_count
        commits_count_since_last_release.is_a?(Array) ? 0 : commits_count_since_last_release
      end

      def issues_active
        issues_open - issues_closed
      end

      def issues_all
        issues_open + issues_closed
      end

      def pr_active
        pr_open - pr_closed
      end

      def pr_all
        pr_open + pr_closed
      end

      def last_changed
        [last_pr_date.presence, last_issue_date.presence].compact.max || 10.years.ago
      end

      def life_period
        last_change = [last_pr_date.presence, last_issue_date.presence].compact.max
        return 0 unless last_change

        first_change = [first_pr_date, first_issue_date].compact.min
        return 0 unless first_change

        (last_change - first_change).to_i
      end

      def life_period_months
        life_period / 1.month
      end

      def releases_count
        [releases_total_rg.count, releases_total_gh.count].max
      end
    end
  end
end
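Assuming the accessors generated by create_attributes_accessors behave like plain attr_accessors holding collections of issue/PR identifiers (Ossert::Stats::Base and config/stats.yml are not shown in this excerpt, so this is an assumption), the derived metrics above combine them roughly like this sketch:

# Hypothetical sketch, not shipped with the gem; accessor behaviour is assumed.
total = Ossert::Stats::AgilityTotal.new
total.issues_open   = ['#1', '#2', '#3']
total.issues_closed = ['#4']
total.issues_all     # => ['#1', '#2', '#3', '#4'] (open + closed)
total.issues_active  # => ['#1', '#2', '#3']       (open - closed)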