active_reporter 0.6.5 → 0.7.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 723ed733317c2a8e7a9671f7df71388c7df0b8019824ef83a8455d1fc2358a26
-  data.tar.gz: 6897c073647fc646151861295592048449e80803ba000332a63b6a026942d770
+  metadata.gz: a17f440927f29a1dac9ad70f949fc92d0f57c72d8ccac27a4b27c0e737f86b43
+  data.tar.gz: 7cede09d4c1e903437b18b945e2ad3ea283ac6cdf886a21cd938124d1c39bcbf
 SHA512:
-  metadata.gz: 1d8eb39876e5f564fdf8b3522b9e1a6739052311b57f525baa4ac827fc0a3507a515c355a23f0b8d1e6280976a9f17dd654e80fb16e39278ceb82fed6fdb2d78
-  data.tar.gz: 89b76c59b8ef242536ed4758c7ff036fa35425703c57a56f1d212a72b642a5f5d24d919e2bc16c4907e93cac6e74e24d8500ca6f1d6ef2502c381cb4ab111fb0
+  metadata.gz: '0851b545beec8ac54b01d5148e8a04b0b39ef94e26f6ce8ca946ab6243bb2bb2f96da10d5d057748142b46d837310397443a27ce1c3b1eaed37faa8a5026be04'
+  data.tar.gz: a511a0301caeff962a234cf472b1557cda47381e04091af704816fcd3951fe4d4bfba4e531f30aeeb6b945ef9b5e038d277ab2ae30a5f915ede53964da09549a
data/README.md CHANGED
@@ -283,7 +283,7 @@ params[:dimensions][<time dimension>][:bin_width] = { months: 2, hours: 2 }
 ```
 
 `Number`s will default to using 10 bins and `Time`s will
-default to using a sensical increment of time given the domain; you can
+default to using a sensible increment of time given the domain; you can
 customize this by overriding methods in those classes.
 
 Note that when you inspect `report.data` after grouping by a bin dimension, you
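For context, the bin-width parameter shown in this README hunk is exercised by the new specs later in this diff. A minimal sketch, assuming a report class like the one those specs build (the `PostReport` name and the `:created_at`/`:likes` dimensions are illustrative, taken from the specs rather than from this README excerpt):

```ruby
# Hypothetical report class, mirroring the spec setup further down in this diff.
class PostReport < ActiveReporter::Report
  report_on :Post
  time_dimension :created_at
  number_dimension :likes
  count_aggregator :post_count
end

# Override the default time bin width via the :dimensions params;
# a hash form such as { months: 2, hours: 2 } is also accepted per the README.
report = PostReport.new(
  aggregators: [:post_count],
  groupers: [:created_at],
  dimensions: { created_at: { bin_width: "1 day" } }
)
report.data  # rows aggregated into day-sized bins
```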
@@ -136,7 +136,7 @@ module ActiveReporter
   alias eql? ==
 
   def bin_edges
-    case
+    case
     when min_and_max? then :min_and_max
     when min? then :min
     when max? then :max
@@ -115,7 +115,7 @@ module ActiveReporter
 
   def autopopulate_bins
     return [] if bin_start.blank? || bin_end.blank?
-
+
     bin_count = ((bin_end - bin_start)/(bin_width)).to_i
     invalid_param!(:bin_width, "is too small for the domain; would generate #{bin_count.to_i} bins") if bin_count > max_bins
 
@@ -11,7 +11,7 @@ module ActiveReporter
   def validate_params!
     super
 
-    invalid_param!(:bin_width, "must be a valid width value or a hash where each hash keys is a valid value and each hash value\n is interger representing the count of the key width\n valid :bin_width values include #{STEPS.to_sentence}.") if params.key?(:bin_width) && !valid_duration?(params[:bin_width])
+    invalid_param!(:bin_width, "must be a valid width value or a hash where each hash keys is a valid value and each hash value\n is an integer representing the count of the key width\n valid :bin_width values include #{STEPS.to_sentence}.") if params.key?(:bin_width) && !valid_duration?(params[:bin_width])
   end
 
   def bin_width
@@ -29,7 +29,7 @@ module ActiveReporter
     # ensure that each autogenerated bin represents a correctly aligned
     # day/week/month/year
     bin_start = super
-
+
     return if bin_start.nil?
 
     step = BIN_STEPS.detect { |step| bin_width == 1.send(step) }
@@ -80,7 +80,7 @@ module ActiveReporter
     # Trackers can only be applied if the last grouper is a bin dimension, since bin dimensions are series of the
     # same data set with a pre-defined sequence. Bin dimension results also allow us to determine if an empty set
     # is present, because the bins are pre-defined.
-    # If additional demensions are included the trackers reset each time these groups change. For example, if the
+    # If additional dimensions are included the trackers reset each time these groups change. For example, if the
     # category dimension "author.id" and time dimension "created_at" with bin_width "day" are used, each time the
     # "author.id" value (bin) changes the tracker is reset so we do not track changes from the last day of each
     # "author.id" to the first day of the next "author.id".
@@ -121,7 +121,6 @@ module ActiveReporter
       results[calculator_group] = calculable? ? (raw_data[calculator_group] || calculator.default_value) : nil
     end
 
-
     trackers.each do |name, tracker|
       tracker_group = group + [name.to_s]
       results[tracker_group] = trackable? ? (raw_data[tracker_group] || tracker.default_value) : nil
@@ -234,18 +233,18 @@ module ActiveReporter
     # field with identical values. If the field type is integer we can deduce the bin width to be 1, but if the
     # type is string or float the the width is less evident.
     # For example, if the field is float and the first value is 1.0 should the next sequential value be 1.1? What
-    # if we have 1.0001? Should we skip 1.0002 if it does not exist and skip right to 1.01? What if we habe 1.0,
+    # if we have 1.0001? Should we skip 1.0002 if it does not exist and skip right to 1.01? What if we have 1.0,
     # 1.1, 1.11, and 1.13 but no 1.12? So we determine that 1.13 is sequentially after 1.11 or de we reset the
     # tracker? Even if there is a "correct" method for one report it may not be correct for a different report. The
     # same problem applies to strings. Which character is after "z"? The ASCII hex value is "{", which would work
     # fine for ordering, but maybe not for determining when a tracker should be reset. Additionally, we need to
     # deal with strings of different lengths. Alphabetically you could order "A", "AA", "AAA", "B" but how do know
-    # when to reset the tracker? If we get a new value of "AAAA" we have entirelly new values used to calculate the
-    # tracker value for the "B" row, effectivally making the tracker values irrelevent.
+    # when to reset the tracker? If we get a new value of "AAAA" we have entirely new values used to calculate the
+    # tracker value for the "B" row, effectively making the tracker values irrelevant.
     # Even going back to the integer example, the value allowed to be stored increments by 1, but there is no
-    # guerentee that these are the actual values being used in the field.
+    # guarantee that these are the actual values being used in the field.
     # For these reasons we will not attempt to track any dimension that does not specifically specify a bin width.
-
+
     # Any class that inherits from Bin will be evaluated, this includes both Number and Time classes, all other
     # classes will be skipped.
     return false unless dimension.is_a?(ActiveReporter::Dimension::Bin)
@@ -41,7 +41,7 @@ module ActiveReporter
     # this data, and so multiple aggregator types are provided.
     #
     # Average aggregator would calculate the average value across all the data in the group.
-    #
+    #
     # Sum aggregator would calculate the sum total of all values across all the data in the group.
     #
     # Additional aggregators are also available for many other calculation types
@@ -86,7 +86,7 @@ module ActiveReporter
     # are configured and an aggregator to sum Likes is configured, a tracker to calculate Likes delta may also be
     # used. Each Published date the delta will be calculated, as long as the previous row has a Published date
     # sequentially immediately adjacent to the current row. If the bin with is date, the dates 2020/06/05 and
-    # 2020/06/06 are adjacent, but if there are no blog posts for 2020/06/07 then the dela will not be calculated
+    # 2020/06/06 are adjacent, but if there are no blog posts for 2020/06/07 then the delta will not be calculated
     # on the 2020/06/08 row since 2020/06/06 is not adjacent. Additionally, when the Author changes no delta will
     # be calculated, even if the Published date on the row is sequentially immediately adjacent.
     #
@@ -5,7 +5,7 @@ module ActiveReporter
     include ActiveReporter::Report::Definition
     include ActiveReporter::Report::Validation
     include ActiveReporter::Report::Metrics
-    include ActiveReporter::Report::Aggregation
+    include ActiveReporter::Report::Aggregation
 
     attr_reader :params, :parent_report, :parent_groupers, :supplements
 
@@ -18,7 +18,7 @@ module ActiveReporter
       # When using a Calculator you may need the parent report data. Pass in a ActiveReporter::Report object when
       # instantiating a new ActiveReporter::Report instance as :parent_report. This will allow you to calculate a data
       # based on the #total_report of this passed :parent_report. For example, if the parent report includes a sum
-      # aggregated "views" column, the child report can use Report::Calculator::Ratio to caluclate the ratio of "views"
+      # aggregated "views" column, the child report can use Report::Calculator::Ratio to calculate the ratio of "views"
       # on a given row versus the total "views" from the parent report.
       @parent_report = @params.delete(:parent_report)
       @parent_groupers = @params.delete(:parent_groupers) || ( grouper_names & Array(parent_report&.grouper_names) )
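A hedged sketch of the parent/child wiring described in this comment, using only constructor keys visible in this diff (`:parent_report` and `:parent_groupers`); how a `Report::Calculator::Ratio` is registered on the child report is not shown here and is left out:

```ruby
# The :views aggregator name is illustrative, taken from the comment above;
# PostReport is the hypothetical report class sketched earlier.
parent = PostReport.new(aggregators: [:views], groupers: [:title])

child = PostReport.new(
  aggregators: [:views],
  groupers: [:title, :created_at],
  parent_report: parent,        # calculators can reference the parent's #total_report
  parent_groupers: [:title]     # defaults to the groupers shared with the parent
)
```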
@@ -26,8 +26,8 @@ module ActiveReporter
       # Supplements -> supplemental reports and data
       #
       # we need 2 items:
-      # 1- the #supplements, a hash of reports and data, we can refrence by name
-      # => this is passed into the report initializer, the key is the name the value is the enrire report object
+      # 1- the #supplements, a hash of reports and data, we can reference by name
+      # => this is passed into the report initializer, the key is the name the value is the entire report object
       # 2- a configuration class, this will allow you to specify a special aggregator in the report class that
       # => take a block. The block defines { |key, row| return_value }, the block has access to the data in
       # #supplements available to use when calculating return the value.
@@ -50,18 +50,17 @@ module ActiveReporter
 
       validate_params!
 
-      # After params are parsed and validated you can call #data (or any derivitive of: #raw_data, #flat_data,
+      # After params are parsed and validated you can call #data (or any derivative of: #raw_data, #flat_data,
       # #hashed_data, #nested_data, etc.) on the ActiveReporter::Report object to #aggregate the data. This will
       # aggregate all the raw data by the configured dimensions, process any calculators, and then process any
       # trackers.
-
-      # Caclulators calculate values using the current row data and the #parent_report.
-
-      # Trackers calculate values using the current row data and prior row data.
 
+      # Calculators calculate values using the current row data and the #parent_report.
+
+      # Trackers calculate values using the current row data and prior row data.
 
       # If pre-compiled raw data was passed in, process all :calculators and :trackers now.
-      aggregate if @raw_data.present? && ( @params.include?(:calculators) || @params.include?(:trackers) )
+      aggregate if @raw_data.present? && ( @params.include?(:calculators) || @params.include?(:trackers) )
       total if @total_data.present?
     end
 
@@ -5,8 +5,9 @@ module ActiveReporter
     class Base
       attr_reader :report
 
-      def initialize(report)
+      def initialize(report, **options)
         @report = report
+        @options = options
       end
 
       # Consider overriding many of these methods to use I18n with keys based
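With this change, any Serializer::Base subclass can take keyword options at construction; they are captured in `@options` for the subclass to use. A small sketch with a hypothetical subclass and option name (only `report.hashed_data` and the new `**options` plumbing come from this diff):

```ruby
# Hypothetical serializer illustrating the new **options plumbing in Base#initialize.
module ActiveReporter
  module Serializer
    class PrefixedHashTable < Base
      def table
        prefix = @options[:prefix] || ""   # @options is set by Base#initialize
        report.hashed_data.map { |row| row.transform_keys { |k| "#{prefix}#{k}" } }
      end
    end
  end
end

ActiveReporter::Serializer::PrefixedHashTable.new(report, prefix: "post_").table
```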
@@ -0,0 +1,22 @@
+ module ActiveReporter
+   module Serializer
+     class NestedHashWithId < Base
+       ID_DELIMITER = "✦".freeze
+
+       def table
+         report.hashed_data.collect { |row| row.map { |k,v| [k, (v.respond_to?(:min) ? v.min : v).to_s] }.to_h }.collect do |row|
+           row_with_id = row.merge(_id: row.slice(*report.grouper_names).values.join(key_delimiter))
+           report.grouper_names.reverse.inject(row_with_id.slice(*report.all_aggregators.keys.prepend(:_id))) do |nested_row_data, group|
+             { row_with_id[group] => nested_row_data }
+           end
+         end.reduce({}, :deep_merge)
+       end
+
+       private
+
+       def key_delimiter
+         @key_delimiter ||= @options[:id_delimiter] || ID_DELIMITER
+       end
+     end
+   end
+ end
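Usage, as exercised by the new specs further down in this diff: the serializer nests rows by grouper value and adds an `_id` key built by joining the grouper values, using "✦" by default or a custom `:id_delimiter` passed to the constructor:

```ruby
# Taken from the spec expectations below; the report groups by :title then :created_at.
ActiveReporter::Serializer::NestedHashWithId.new(report).table
# => { "A" => { "2016-01-01 00:00:00 UTC" => { _id: "A✦2016-01-01 00:00:00 UTC",
#               post_count: "2", likes_count: "4" }, ... }, ... }

# Override the delimiter used when joining grouper values into _id:
ActiveReporter::Serializer::NestedHashWithId.new(report, id_delimiter: "___").table
```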
@@ -5,3 +5,4 @@ require "active_reporter/serializer/form_field"
  require "active_reporter/serializer/highcharts"
  require "active_reporter/serializer/hash_table"
  require "active_reporter/serializer/nested_hash"
+ require "active_reporter/serializer/nested_hash_with_id"
@@ -1,3 +1,3 @@
  module ActiveReporter
-   VERSION = "0.6.5"
+   VERSION = "0.7.1"
  end
@@ -34,7 +34,7 @@ describe "more complicated case" do
   def expect_equal(h1, h2)
     # sqlite uses Float instead of BigDecimal, we need to normalize the JSON objects to use the
     # same data type so the values match. We also round these at 9 decimal places to account for
-    # rounding discrepencies between the two data types
+    # rounding discrepancies between the two data types
 
     h1_json = JSON.parse(h1.to_json).map do |a|
       a.deep_transform_values do |v|
@@ -227,7 +227,7 @@ describe "more complicated case" do
         { key: :min_likes, value: 3 },
         { key: :max_likes, value: 3 }
       ] }
-    ] }
+    ] }
     ]
   end
 
@@ -14,7 +14,7 @@ describe ActiveReporter::Dimension::Enum do
   let(:enum_values) { { "draft" => 0, "unpublished" => 1, "published" => 2, "archived" => 3 } }
   let(:group_values) { ["published", "archived"] }
   let(:all_values) { enum_values.keys.unshift(nil) }
-
+
   let(:status_dimension) do
     dimension = ActiveReporter::Dimension::Enum.new(:status, report, { model: report_model, only: filter_values })
     allow(dimension).to receive(:enum_values).and_return(enum_values)
@@ -24,12 +24,12 @@ describe ActiveReporter::Report do
   let(:report) { report_model.new({groupers: groupers, aggregators: aggregators, dimensions: dimensions, parent_report: parent_report, parent_groupers: parent_groupers, calculators: calculators, trackers: trackers}.compact) }
 
   let(:year) { 1.year.ago.year }
-
+
   let(:jan_datetime) { Time.new(year,1,1,0,0,0,0) }
   let(:feb_datetime) { Time.new(year,2,1,0,0,0,0) }
   let(:mar_datetime) { Time.new(year,3,1,0,0,0,0) }
   let(:apr_datetime) { Time.new(year,4,1,0,0,0,0) }
-
+
   let(:jan) { { min: jan_datetime, max: jan_datetime.next_month } }
   let(:feb) { { min: feb_datetime, max: feb_datetime.next_month } }
   let(:mar) { { min: mar_datetime, max: mar_datetime.next_month } }
@@ -93,7 +93,7 @@ describe ActiveReporter::Report do
   let!(:author1_jan01_post) { create(:post, author: author1, created_at: Date.new(year,1,1), likes: 7) }
   let!(:author1_jan12_post) { create(:post, author: author1, created_at: Date.new(year,1,12), likes: 4) }
   let!(:author1_mar08_post) { create(:post, author: author1, created_at: Date.new(year,3,8), likes: 11) }
-
+
   let!(:author2_jan15_post) { create(:post, author: author2, created_at: Date.new(year,1,15), likes: 3) }
   let!(:author2_feb27_post) { create(:post, author: author2, created_at: Date.new(year,2,27), likes: 24) }
   let!(:author2_feb28_post) { create(:post, author: author2, created_at: Date.new(year,2,28), likes: 0) }
@@ -0,0 +1,47 @@
+ require "spec_helper"
+
+ describe ActiveReporter::Serializer::NestedHash do
+   let(:report_model) do
+     Class.new(ActiveReporter::Report) do
+       report_on :Post
+       number_dimension :likes
+       time_dimension :created_at
+       category_dimension :title
+       count_aggregator :post_count
+       sum_aggregator :likes_count, attribute: :likes
+     end
+   end
+
+   let(:report) do
+     report_model.new(
+       aggregators: [:post_count, :likes_count],
+       groupers: %i[title created_at],
+       dimensions: { created_at: { bin_width: "1 day" } }
+     )
+   end
+
+   let(:nested_hash_with_id) { ActiveReporter::Serializer::NestedHash.new(report) }
+   let!(:data_records) do
+     [
+       create(:post, created_at: "2016-01-01", likes: 2, title: "A"),
+       create(:post, created_at: "2016-01-01", likes: 2, title: "A"),
+       create(:post, created_at: "2016-01-01", likes: 1, title: "B"),
+       create(:post, created_at: "2016-01-02", likes: 1, title: "A"),
+     ]
+   end
+
+   describe "#report" do
+     it "builds report" do
+       expect(nested_hash_with_id.table).to eq({
+         "A" => {
+           "2016-01-01 00:00:00 UTC" => { post_count: "2", likes_count: "4" },
+           "2016-01-02 00:00:00 UTC" => { post_count: "1", likes_count: "1" },
+         },
+         "B" => {
+           "2016-01-01 00:00:00 UTC" => { post_count: "1", likes_count: "1" },
+           "2016-01-02 00:00:00 UTC" => { post_count: "0", likes_count: "0" },
+         }
+       })
+     end
+   end
+ end
@@ -0,0 +1,69 @@
+ require "spec_helper"
+
+ describe ActiveReporter::Serializer::NestedHashWithId do
+   let(:report_model) do
+     Class.new(ActiveReporter::Report) do
+       report_on :Post
+       number_dimension :likes
+       time_dimension :created_at
+       category_dimension :title
+       count_aggregator :post_count
+       sum_aggregator :likes_count, attribute: :likes
+     end
+   end
+
+   let(:report) do
+     report_model.new(
+       aggregators: [:post_count, :likes_count],
+       groupers: %i[title created_at],
+       dimensions: { created_at: { bin_width: "1 day" } }
+     )
+   end
+
+   let(:nested_hash_with_id) { ActiveReporter::Serializer::NestedHashWithId.new(report) }
+   let!(:data_records) do
+     [
+       create(:post, created_at: "2016-01-01", likes: 2, title: "A"),
+       create(:post, created_at: "2016-01-01", likes: 2, title: "A"),
+       create(:post, created_at: "2016-01-01", likes: 1, title: "B"),
+       create(:post, created_at: "2016-01-02", likes: 1, title: "A"),
+     ]
+   end
+
+   describe "#report" do
+     context "with :id_delimiter option" do
+       let(:id_delimiter) { "___" }
+       let(:nested_hash_with_id) { ActiveReporter::Serializer::NestedHashWithId.new(report, id_delimiter: id_delimiter) }
+
+       it "builds report" do
+         expect(nested_hash_with_id.table).to eq({
+           "A" => {
+             "2016-01-01 00:00:00 UTC" => { _id: "A#{id_delimiter}2016-01-01 00:00:00 UTC", post_count: "2", likes_count: "4" },
+             "2016-01-02 00:00:00 UTC" => { _id: "A#{id_delimiter}2016-01-02 00:00:00 UTC", post_count: "1", likes_count: "1" },
+           },
+           "B" => {
+             "2016-01-01 00:00:00 UTC" => { _id: "B#{id_delimiter}2016-01-01 00:00:00 UTC", post_count: "1", likes_count: "1" },
+             "2016-01-02 00:00:00 UTC" => { _id: "B#{id_delimiter}2016-01-02 00:00:00 UTC", post_count: "0", likes_count: "0" },
+           }
+         })
+       end
+     end
+
+     context "without :id_delimiter option" do
+       let(:id_delimiter) { ActiveReporter::Serializer::NestedHashWithId::ID_DELIMITER }
+
+       it "builds report" do
+         expect(nested_hash_with_id.table).to eq({
+           "A" => {
+             "2016-01-01 00:00:00 UTC" => { _id: "A#{id_delimiter}2016-01-01 00:00:00 UTC", post_count: "2", likes_count: "4" },
+             "2016-01-02 00:00:00 UTC" => { _id: "A#{id_delimiter}2016-01-02 00:00:00 UTC", post_count: "1", likes_count: "1" },
+           },
+           "B" => {
+             "2016-01-01 00:00:00 UTC" => { _id: "B#{id_delimiter}2016-01-01 00:00:00 UTC", post_count: "1", likes_count: "1" },
+             "2016-01-02 00:00:00 UTC" => { _id: "B#{id_delimiter}2016-01-02 00:00:00 UTC", post_count: "0", likes_count: "0" },
+           }
+         })
+       end
+     end
+   end
+ end
@@ -62,7 +62,7 @@ class DataBuilder
       stddev_likes = gaussian(10, 2.5)
       h[author] = [average_likes, stddev_likes]
     end
-
+
     likeability_for = Hash.new { |author_hash, author|
       author_hash[author] = Hash.new { |title_hash, title|
  average_likes, stddev_likes = author_likeability[author]