reading 0.7.0 → 0.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41):
  1. checksums.yaml +4 -4
  2. data/bin/reading +80 -10
  3. data/lib/reading/config.rb +96 -52
  4. data/lib/reading/errors.rb +4 -1
  5. data/lib/reading/filter.rb +95 -0
  6. data/lib/reading/item/time_length.rb +69 -30
  7. data/lib/reading/item/view.rb +116 -0
  8. data/lib/reading/item.rb +384 -0
  9. data/lib/reading/parsing/attributes/attribute.rb +1 -8
  10. data/lib/reading/parsing/attributes/experiences/dates_and_head_transformer.rb +11 -12
  11. data/lib/reading/parsing/attributes/experiences/history_transformer.rb +31 -22
  12. data/lib/reading/parsing/attributes/experiences/spans_validator.rb +19 -20
  13. data/lib/reading/parsing/attributes/experiences.rb +6 -6
  14. data/lib/reading/parsing/attributes/notes.rb +1 -1
  15. data/lib/reading/parsing/attributes/shared.rb +15 -8
  16. data/lib/reading/parsing/attributes/variants.rb +10 -7
  17. data/lib/reading/parsing/csv.rb +58 -44
  18. data/lib/reading/parsing/parser.rb +24 -25
  19. data/lib/reading/parsing/rows/blank.rb +23 -0
  20. data/lib/reading/parsing/rows/comment.rb +6 -7
  21. data/lib/reading/parsing/rows/compact_planned.rb +9 -9
  22. data/lib/reading/parsing/rows/compact_planned_columns/head.rb +2 -2
  23. data/lib/reading/parsing/rows/custom_config.rb +42 -0
  24. data/lib/reading/parsing/rows/regular.rb +15 -14
  25. data/lib/reading/parsing/rows/regular_columns/length.rb +8 -8
  26. data/lib/reading/parsing/rows/regular_columns/sources.rb +15 -9
  27. data/lib/reading/parsing/transformer.rb +15 -19
  28. data/lib/reading/stats/filter.rb +738 -0
  29. data/lib/reading/stats/grouping.rb +243 -0
  30. data/lib/reading/stats/operation.rb +313 -0
  31. data/lib/reading/stats/query.rb +37 -0
  32. data/lib/reading/stats/terminal_result_formatters.rb +91 -0
  33. data/lib/reading/util/exclude.rb +12 -0
  34. data/lib/reading/util/hash_to_data.rb +30 -0
  35. data/lib/reading/version.rb +1 -1
  36. data/lib/reading.rb +51 -5
  37. metadata +28 -7
  38. data/bin/readingfile +0 -31
  39. data/lib/reading/util/hash_to_struct.rb +0 -30
  40. data/lib/reading/util/string_remove.rb +0 -28
  41. data/lib/reading/util/string_truncate.rb +0 -22
@@ -1,4 +1,4 @@
1
- require_relative "spans_validator"
1
+ require_relative 'spans_validator'
2
2
 
3
3
  module Reading
4
4
  module Parsing
@@ -16,18 +16,18 @@ module Reading
16
16
  # many days, for example.
17
17
  AVERAGE_DAYS_IN_A_MONTH = 30.437r
18
18
 
19
- private attr_reader :parsed_row, :config
19
+ private attr_reader :parsed_row, :head_index
20
20
 
21
21
  # @param parsed_row [Hash] a parsed row (the intermediate hash).
22
- # @param config [Hash] an entire config
23
- def initialize(parsed_row, config)
22
+ # @param head_index [Integer] current item's position in the Head column.
23
+ def initialize(parsed_row, head_index)
24
24
  @parsed_row = parsed_row
25
- @config = config
25
+ @head_index = head_index
26
26
  end
27
27
 
28
28
  # Extracts experiences from the parsed row.
29
29
  # @return [Array<Hash>] an array of experiences; see
30
- # Config#default_config[:item_template][:experiences]
30
+ # Config#default_config[:item][:template][:experiences]
31
31
  def transform
32
32
  experiences = parsed_row[:history].map { |entries|
33
33
  {
@@ -37,8 +37,7 @@ module Reading
37
37
  }
38
38
  }
39
39
 
40
- # Raises an error if experiences overlap or are out of order.
41
- Experiences::SpansValidator.validate(experiences, config, history_column: true)
40
+ Experiences::SpansValidator.validate(experiences, history_column: true)
42
41
 
43
42
  experiences
44
43
  end
@@ -48,13 +47,13 @@ module Reading
48
47
  # A shortcut to the span template.
49
48
  # @return [Hash]
50
49
  def span_template
51
- @span_template ||= config.deep_fetch(:item_template, :experiences, 0, :spans).first
50
+ @span_template ||= Config.hash.deep_fetch(:item, :template, :experiences, 0, :spans).first
52
51
  end
53
52
 
54
53
  # The :spans sub-attribute for the given History column entries.
55
54
  # @param entries [Array<Hash>] History entries for one experience.
56
55
  # @return [Array<Hash>] an array of spans; see
57
- # Config#default_config[:item_template][:experiences].first[:spans]
56
+ # Config#default_config[:item][:template][:experiences].first[:spans]
58
57
  def spans_from_history_entries(entries)
59
58
  daily_spans = {}
60
59
  active = {
@@ -154,6 +153,12 @@ module Reading
154
153
  raise InvalidHistoryError, "Missing or incomplete first date"
155
154
  end
156
155
 
156
+ if entry[:planned] || (active[:planned] && !start_day)
157
+ active[:planned] = true
158
+ elsif active[:planned] && start_day
159
+ active[:planned] = false
160
+ end
161
+
157
162
  duplicate_open_range = !start_day && active[:open_range]
158
163
  date_range = date_range(entry, active, duplicate_open_range:)
159
164
 
@@ -165,9 +170,13 @@ module Reading
165
170
  end
166
171
  active[:after_single_date] = !date_range
167
172
 
173
+ variant_index = (entry[:variant_index] || 1).to_i - 1
174
+ format = parsed_row[:sources]&.dig(variant_index)&.dig(:format) ||
175
+ parsed_row[:head][head_index][:format]
176
+
168
177
  amount =
169
- Attributes::Shared.length(entry, key_name: :amount, ignore_repetitions: true) ||
170
- Attributes::Shared.length(parsed_row[:length], episodic: true)
178
+ Attributes::Shared.length(entry, format:, key_name: :amount, ignore_repetitions: true) ||
179
+ Attributes::Shared.length(parsed_row[:length], format:, episodic: true)
171
180
  active[:amount] = amount if amount
172
181
 
173
182
  progress = Attributes::Shared.progress(entry)
@@ -178,7 +187,7 @@ module Reading
178
187
  # https://github.com/fpsvogel/reading/blob/main/doc/csv-format.md#history-pages-and-stopping-points-books
179
188
  if !amount && progress
180
189
  if progress.is_a? Float
181
- total_length = Attributes::Shared.length(parsed_row[:length])
190
+ total_length = Attributes::Shared.length(parsed_row[:length], format:)
182
191
  amount = total_length * progress
183
192
  else
184
193
  amount = progress
@@ -202,7 +211,9 @@ module Reading
202
211
  span_without_dates = {
203
212
  dates: nil,
204
213
  amount: daily_amount || span_template[:amount],
205
- progress: (progress unless amount_from_progress) || span_template[:progress],
214
+ progress: (progress unless amount_from_progress) ||
215
+ (0.0 if entry[:planned] || active[:planned]) ||
216
+ span_template[:progress],
206
217
  name: entry[:name] || span_template[:name],
207
218
  favorite?: !!entry[:favorite] || span_template[:favorite?],
208
219
  # Temporary keys (not in the final item data) for marking
@@ -213,17 +224,15 @@ module Reading
213
224
  amount_from_progress: amount_from_progress,
214
225
  }
215
226
 
216
- if entry[:planned] || (active[:planned] && !start_day)
227
+ if entry[:planned] || active[:planned]
217
228
  date = nil
218
- active[:planned] = true
219
229
  end
220
230
 
221
231
  key = [date, span_without_dates[:name]]
222
232
 
223
- # When any entry in an open range lacks a name, add a random
224
- # number to the key so that it does not overwrite a different
225
- # entry in the open range that also lacks a name.
226
- if in_open_range && !entry[:name]
233
+ # For entries in an open range, add a random number to the key to
234
+ # avoid overwriting entries with the same name, or lacking a name.
235
+ if in_open_range
227
236
  key << rand
228
237
  end
229
238
 
@@ -304,11 +313,11 @@ module Reading
304
313
  # Distributes an amount across the given date(s).
305
314
  # @param date_or_range [Date, Range<Date>] the date or range across
306
315
  # which the amount will be split up.
307
- # @param amount [Float, Integer, Reading::Item::TimeLength] amount in
316
+ # @param amount [Float, Integer, Item::TimeLength] amount in
308
317
  # pages or time.
309
318
  # @param repetitions [Integer] e.g. "x4" in a History entry.
310
319
  # @param frequency [Integer] e.g. "/week" in a History entry.
311
- # @return [Hash{Date => Float, Integer, Reading::Item::TimeLength}]
320
+ # @return [Hash{Date => Float, Integer, Item::TimeLength}]
312
321
  def distribute_amount_across_date_range(date_or_range, amount, repetitions, frequency)
313
322
  unless amount
314
323
  raise InvalidHistoryError, "Missing length or amount"
@@ -11,24 +11,23 @@ module Reading
11
11
  # Checks the dates in the given experiences hash, and raises an error
12
12
  # at the first invalid date found.
13
13
  # @param experiences [Array<Hash>] experience hashes.
14
- # @param config [Hash] an entire config.
15
14
  # @param history_column [Boolean] whether this validation is for
16
15
  # experiences from the History column.
17
16
  # @raise [InvalidDateError] if any date is invalid.
18
- def validate(experiences, config, history_column: false)
19
- if both_date_columns?(config)
17
+ def validate(experiences, history_column: false)
18
+ if both_date_columns?
20
19
  validate_number_of_start_dates_and_end_dates(experiences)
21
20
  end
22
21
 
23
- if start_dates_column?(config) || history_column
22
+ if start_dates_column? || history_column
24
23
  validate_start_dates_are_in_order(experiences)
25
24
  end
26
25
 
27
- if end_dates_column?(config) || history_column
26
+ if end_dates_column? || history_column
28
27
  validate_end_dates_are_in_order(experiences)
29
28
  end
30
29
 
31
- if both_date_columns?(config) || history_column
30
+ if both_date_columns? || history_column
32
31
  validate_experiences_of_same_variant_do_not_overlap(experiences)
33
32
  end
34
33
 
@@ -39,28 +38,28 @@ module Reading
39
38
 
40
39
  # Whether the Start Dates column is enabled.
41
40
  # @return [Boolean]
42
- def start_dates_column?(config)
43
- config.fetch(:enabled_columns).include?(:start_dates)
41
+ def start_dates_column?
42
+ Config.hash.fetch(:enabled_columns).include?(:start_dates)
44
43
  end
45
44
 
46
45
  # Whether the End Dates column is enabled.
47
46
  # @return [Boolean]
48
- def end_dates_column?(config)
49
- config.fetch(:enabled_columns).include?(:end_dates)
47
+ def end_dates_column?
48
+ Config.hash.fetch(:enabled_columns).include?(:end_dates)
50
49
  end
51
50
 
52
51
  # Whether both the Start Dates and End Dates columns are enabled.
53
52
  # @return [Boolean]
54
- def both_date_columns?(config)
55
- start_dates_column?(config) && end_dates_column?(config)
53
+ def both_date_columns?
54
+ start_dates_column? && end_dates_column?
56
55
  end
57
56
 
58
57
  # Raises an error if there are more end dates than start dates, or
59
58
  # if there is more than one more start date than end dates.
60
59
  # @raise [InvalidDateError]
61
60
  def validate_number_of_start_dates_and_end_dates(experiences)
62
- both_dates, not_both_dates = experiences
63
- .filter { |exp| exp[:spans].first&.dig(:dates) }
61
+ _both_dates, not_both_dates = experiences
62
+ .select { |exp| exp[:spans].first&.dig(:dates) }
64
63
  .map { |exp| [exp[:spans].first[:dates].begin, exp[:spans].last[:dates].end] }
65
64
  .partition { |start_date, end_date| start_date && end_date }
66
65
 
@@ -76,7 +75,7 @@ module Reading
76
75
  # @raise [InvalidDateError]
77
76
  def validate_start_dates_are_in_order(experiences)
78
77
  experiences
79
- .filter { |exp| exp[:spans].first&.dig(:dates) }
78
+ .select { |exp| exp[:spans].first&.dig(:dates) }
80
79
  .map { |exp| exp[:spans].first[:dates].begin }
81
80
  .each_cons(2) do |a, b|
82
81
  if (a.nil? && b.nil?) || (a && b && a > b )
@@ -89,8 +88,8 @@ module Reading
89
88
  # @raise [InvalidDateError]
90
89
  def validate_end_dates_are_in_order(experiences)
91
90
  experiences
92
- .filter { |exp| exp[:spans].first&.dig(:dates) }
93
- .map { |exp| exp[:spans].last[:dates].end }
91
+ .select { |exp| exp[:spans].first&.dig(:dates) }
92
+ .map { |exp| exp[:spans].last[:dates]&.end }
94
93
  .each_cons(2) do |a, b|
95
94
  if (a.nil? && b.nil?) || (a && b && a > b )
96
95
  raise InvalidDateError, "End dates are not in order"
@@ -104,7 +103,7 @@ module Reading
104
103
  experiences
105
104
  .group_by { |exp| exp[:variant_index] }
106
105
  .each do |_variant_index, exps|
107
- exps.filter { |exp| exp[:spans].any? }.each_cons(2) do |a, b|
106
+ exps.select { |exp| exp[:spans].any? }.each_cons(2) do |a, b|
108
107
  a_metaspan = a[:spans].first[:dates].begin..a[:spans].last[:dates].end
109
108
  b_metaspan = b[:spans].first[:dates].begin..b[:spans].last[:dates].end
110
109
  if a_metaspan.cover?(b_metaspan.begin || a_metaspan.begin || a_metaspan.end) ||
@@ -116,11 +115,11 @@ module Reading
116
115
  end
117
116
 
118
117
  # Raises an error if the spans within an experience are out of order
119
- # or if the spans overlap.
118
+ # or if the spans overlap. Spans with nil dates are not considered.
120
119
  # @raise [InvalidDateError]
121
120
  def validate_spans_are_in_order_and_not_overlapping(experiences)
122
121
  experiences
123
- .filter { |exp| exp[:spans].first&.dig(:dates) }
122
+ .select { |exp| exp[:spans].first&.dig(:dates) }
124
123
  .each do |exp|
125
124
  exp[:spans]
126
125
  .map { |span| span[:dates] }
@@ -1,6 +1,6 @@
1
- require "date"
2
- require_relative "experiences/history_transformer"
3
- require_relative "experiences/dates_and_head_transformer"
1
+ require 'date'
2
+ require_relative 'experiences/history_transformer'
3
+ require_relative 'experiences/dates_and_head_transformer'
4
4
 
5
5
  module Reading
6
6
  module Parsing
@@ -13,13 +13,13 @@ module Reading
13
13
  # @param parsed_row [Hash] a parsed row (the intermediate hash).
14
14
  # @param head_index [Integer] current item's position in the Head column.
15
15
  # @return [Array<Hash>] an array of experiences; see
16
- # Config#default_config[:item_template][:experiences]
16
+ # Config#default_config[:item][:template][:experiences]
17
17
  def transform_from_parsed(parsed_row, head_index)
18
18
  if !parsed_row[:history].blank?
19
- return HistoryTransformer.new(parsed_row, config).transform
19
+ return HistoryTransformer.new(parsed_row, head_index).transform
20
20
  end
21
21
 
22
- DatesAndHeadTransformer.new(parsed_row, head_index, config).transform
22
+ DatesAndHeadTransformer.new(parsed_row, head_index).transform
23
23
  end
24
24
  end
25
25
  end
@@ -6,7 +6,7 @@ module Reading
6
6
  # @param parsed_row [Hash] a parsed row (the intermediate hash).
7
7
  # @param _head_index [Integer] current item's position in the Head column.
8
8
  # @return [Array<Hash>] an array of notes; see
9
- # Config#default_config[:item_template][:notes]
9
+ # Config#default_config[:item][:template][:notes]
10
10
  def transform_from_parsed(parsed_row, _head_index)
11
11
  parsed_row[:notes]&.map { |note|
12
12
  {
@@ -1,16 +1,19 @@
1
1
  module Reading
2
2
  module Parsing
3
3
  module Attributes
4
- # Shared
4
+ # Sub-attributes that are shared across multiple attributes.
5
5
  module Shared
6
+ using Util::HashArrayDeepFetch
7
+ using Util::NumericToIIfWhole
8
+
6
9
  # Extracts the :progress sub-attribute (percent, pages, or time) from
7
10
  # the given hash.
8
11
  # @param hash [Hash] any parsed hash that contains progress.
9
- # @return [Float, Integer, Reading::Item::TimeLength]
12
+ # @return [Float, Integer, Item::TimeLength]
10
13
  def self.progress(hash)
11
14
  hash[:progress_percent]&.to_f&./(100) ||
12
15
  hash[:progress_pages]&.to_i ||
13
- hash[:progress_time]&.then { Item::TimeLength.parse _1 } ||
16
+ hash[:progress_time]&.then { Item::TimeLength.parse(_1) } ||
14
17
  (0 if hash[:progress_dnf]) ||
15
18
  (1.0 if hash[:progress_done]) ||
16
19
  nil
@@ -18,6 +21,8 @@ module Reading
18
21
 
19
22
  # Extracts the :length sub-attribute (pages or time) from the given hash.
20
23
  # @param hash [Hash] any parsed hash that contains length.
24
+ # @param format [Symbol] the item format, which affects length in cases
25
+ # where Config.hash[:speed][:format] is customized.
21
26
  # @param key_name [Symbol] the first part of the keys to be checked.
22
27
  # @param episodic [Boolean] whether to look for episodic (not total) length.
23
28
  # If false, returns nil if hash contains :each. If true, returns a
@@ -29,16 +34,16 @@ module Reading
29
34
  # that e.g. "1:00 x14" gives a length of 1 hour instead of 14 hours.
30
35
  # This is useful for the History column, where that 1 hour can be used
31
36
  # as the default amount.
32
- # @return [Float, Integer, Reading::Item::TimeLength]
33
- def self.length(hash, key_name: :length, episodic: false, ignore_repetitions: false)
37
+ # @return [Float, Integer, Item::TimeLength]
38
+ def self.length(hash, format:, key_name: :length, episodic: false, ignore_repetitions: false)
34
39
  return nil unless hash
35
40
 
36
41
  length = hash[:"#{key_name}_pages"]&.to_i ||
37
- hash[:"#{key_name}_time"]&.then { Item::TimeLength.parse _1 }
42
+ hash[:"#{key_name}_time"]&.then { Item::TimeLength.parse(_1) }
38
43
 
39
44
  return nil unless length
40
45
 
41
- if hash[:each]
46
+ if hash[:each] && !hash[:repetitions]
42
47
  # Length is calculated based on History column in this case.
43
48
  if episodic
44
49
  return length
@@ -54,7 +59,9 @@ module Reading
54
59
  return nil if episodic && !hash[:each]
55
60
  end
56
61
 
57
- length
62
+ speed = Config.hash.deep_fetch(:speed, :format)[format] || 1.0
63
+
64
+ (length / speed).to_i_if_whole
58
65
  end
59
66
  end
60
67
  end
@@ -4,23 +4,26 @@ module Reading
4
4
  # Transformer for the :variant item attribute.
5
5
  class Variants < Attribute
6
6
  using Util::HashArrayDeepFetch
7
+ using Util::NumericToIIfWhole
7
8
 
8
9
  # @param parsed_row [Hash] a parsed row (the intermediate hash).
9
10
  # @param head_index [Integer] current item's position in the Head column.
10
11
  # @return [Array<Hash>] an array of variants; see
11
- # Config#default_config[:item_template][:variants]
12
+ # Config#default_config[:item][:template][:variants]
12
13
  def transform_from_parsed(parsed_row, head_index)
13
14
  head = parsed_row[:head][head_index]
14
15
 
15
16
  # || [{}] in case there is no Sources column.
16
17
  (parsed_row[:sources].presence || [{}])&.map { |variant|
18
+ format = variant[:format] || head[:format]
19
+
17
20
  {
18
- format: variant[:format] || head[:format],
21
+ format:,
19
22
  series: (series(head) + series(variant)).presence,
20
23
  sources: sources(variant) || sources(head),
21
24
  isbn: variant[:isbn] || variant[:asin],
22
- length: Attributes::Shared.length(variant) ||
23
- Attributes::Shared.length(parsed_row[:length]),
25
+ length: Attributes::Shared.length(variant, format:) ||
26
+ Attributes::Shared.length(parsed_row[:length], format:),
24
27
  extra_info: Array(head[:extra_info]) + Array(variant[:extra_info]),
25
28
  }.map { |k, v| [k, v || template.fetch(k)] }.to_h
26
29
  }&.compact&.presence
@@ -29,7 +32,7 @@ module Reading
29
32
  # A shortcut to the variant template.
30
33
  # @return [Hash]
31
34
  def template
32
- config.deep_fetch(:item_template, :variants).first
35
+ Config.hash.deep_fetch(:item, :template, :variants).first
33
36
  end
34
37
 
35
38
  # The :series sub-attribute for the given parsed hash.
@@ -57,11 +60,11 @@ module Reading
57
60
  end
58
61
 
59
62
  # The name for the given URL string, according to
60
- # config[:source_names_from_urls], or nil.
63
+ # Config.hash[:source_names_from_urls], or nil.
61
64
  # @param url [String] a URL.
62
65
  # @return [String, nil]
63
66
  def url_name(url)
64
- config
67
+ Config.hash
65
68
  .fetch(:source_names_from_urls)
66
69
  .each do |url_part, name|
67
70
  if url.include?(url_part)
@@ -1,24 +1,13 @@
1
- # Used throughout, in other files.
2
- require_relative "../util/blank"
3
- require_relative "../util/string_remove"
4
- require_relative "../util/string_truncate"
5
- require_relative "../util/numeric_to_i_if_whole"
6
- require_relative "../util/hash_to_struct"
7
- require_relative "../util/hash_deep_merge"
8
- require_relative "../util/hash_array_deep_fetch"
9
- require_relative "../util/hash_compact_by_template"
10
- require_relative "../errors"
11
-
12
- # Used just here.
13
- require_relative "../config"
14
- require_relative "parser"
15
- require_relative "transformer"
1
+ require 'pastel'
2
+ require_relative '../item'
3
+ require_relative 'parser'
4
+ require_relative 'transformer'
16
5
 
17
6
  module Reading
18
7
  module Parsing
19
8
  #
20
- # Validates a path or stream (string, file, etc.) of a CSV reading log, then
21
- # parses it into item data (an array of Structs).
9
+ # Validates a path or lines (string, file, etc.) of a CSV reading log, then
10
+ # parses it into an array of Items.
22
11
  #
23
12
  # Parsing happens in two steps:
24
13
  # (1) Parse a row string into an intermediate hash representing the columns.
@@ -31,69 +20,94 @@ module Reading
31
20
  # inspired by the Parslet gem: https://kschiess.github.io/parslet/transform.html
32
21
  #
33
22
  class CSV
34
- using Util::HashToStruct
35
-
36
- private attr_reader :parser, :transformer
23
+ private attr_reader :parser, :transformer, :hash_output, :item_view, :error_handler, :pastel
37
24
 
38
- # Validates a path or stream (string, file, etc.) of a CSV reading log,
25
+ # Validates a path or lines (string, file, etc.) of a CSV reading log,
39
26
  # builds the config, and initializes the parser and transformer.
40
- # @param path [String] path to the CSV file; if nil, stream is used instead.
41
- # @param stream [Object] an object responding to #each_linewith CSV row(s);
42
- # used if no path is given.
43
- # @param config [Hash] a custom config which overrides the defaults,
27
+ # @param path [String] path to the CSV file; used if no lines are given.
28
+ # @param lines [Object] an object responding to #each_line with CSV row(s);
29
+ # if nil, path is used instead.
30
+ # @param config [Hash, Config] a custom config which overrides the defaults,
44
31
  # e.g. { errors: { styling: :html } }
45
- def initialize(path = nil, stream: nil, config: {})
46
- validate_path_or_stream(path, stream)
47
- full_config = Config.new(config).hash
32
+ # @param hash_output [Boolean] whether an array of raw Hashes should be
33
+ # returned, without Items being created from them.
34
+ # @param item_view [Class, nil, Boolean] the class that will be used to build
35
+ # each Item's view object, or nil/false if no view object should be built.
36
+ # If you use a custom view class, the only requirement is that its
37
+ # #initialize take an Item and a full config as arguments.
38
+ # @param error_handler [Proc] if not provided, errors are raised.
39
+ def initialize(path: nil, lines: nil, config: nil, hash_output: false, item_view: Item::View, error_handler: nil)
40
+ validate_path_or_lines(path, lines)
41
+
42
+ Config.build(config) if config
48
43
 
49
44
  @path = path
50
- @stream = stream
51
- @parser = Parser.new(full_config)
52
- @transformer = Transformer.new(full_config)
45
+ @lines = lines
46
+ @hash_output = hash_output
47
+ @item_view = item_view
48
+ @parser = Parser.new
49
+ @transformer = Transformer.new
50
+ @error_handler = error_handler
51
+ @pastel = Pastel.new
53
52
  end
54
53
 
55
54
  # Parses and transforms the reading log into item data.
56
- # @return [Array<Struct>] an array of Structs like the template in
57
- # Config#default_config[:item_template]. The Structs are identical in
58
- # structure to that Hash (with every inner Hash replaced by a Struct).
55
+ # @return [Array<Item>] an array of Items like the template in
56
+ # Config#default_config[:item][:template]. The Items are identical in
57
+ # structure to that Hash (with every inner Hash replaced by a Data for
58
+ # dot access).
59
59
  def parse
60
- input = @path ? File.open(@path) : @stream
60
+ input = @lines || File.open(@path)
61
61
  items = []
62
62
 
63
63
  input.each_line do |line|
64
64
  begin
65
65
  intermediate = parser.parse_row_to_intermediate_hash(line)
66
+
66
67
  next if intermediate.empty? # When the row is blank or a comment.
68
+
67
69
  row_items = transformer.transform_intermediate_hash_to_item_hashes(intermediate)
68
70
  rescue Reading::Error => e
69
- raise e.class, "#{e.message} in the row \"#{line}\""
71
+ colored_e =
72
+ e.class.new("#{pastel.bright_red(e.message)} in the row #{pastel.bright_yellow(line.chomp)}")
73
+
74
+ if error_handler
75
+ error_handler.call(colored_e)
76
+ next
77
+ else
78
+ raise colored_e
79
+ end
70
80
  end
71
81
 
72
82
  items += row_items
73
83
  end
74
84
 
75
- items.map(&:to_struct)
85
+ if hash_output
86
+ items
87
+ else
88
+ items.map { |item_hash| Item.new(item_hash, view: item_view) }
89
+ end
76
90
  ensure
77
91
  input&.close if input.respond_to?(:close)
78
92
  end
79
93
 
80
94
  private
81
95
 
82
- # Checks on the given stream and path (arguments to #initialize).
96
+ # Checks on the given lines and path (arguments to #initialize).
83
97
  # @raise [FileError] if the given path is invalid.
84
- # @raise [ArgumentError] if both stream and path are nil.
85
- def validate_path_or_stream(path, stream)
86
- if path
98
+ # @raise [ArgumentError] if both lines and path are nil.
99
+ def validate_path_or_lines(path, lines)
100
+ if lines && lines.respond_to?(:each_line)
101
+ return true
102
+ elsif path
87
103
  if !File.exist?(path)
88
104
  raise FileError, "File not found! #{path}"
89
105
  elsif File.directory?(path)
90
106
  raise FileError, "A file is expected, but the path given is a directory: #{path}"
91
107
  end
92
- elsif stream && stream.respond_to?(:each_line)
93
- return true
94
108
  else
95
109
  raise ArgumentError,
96
- "Either a file path or a stream (string, file, etc.) must be provided."
110
+ "Provide either a file path or object implementing #each_line (String, File, etc.)."
97
111
  end
98
112
  end
99
113
  end
@@ -1,6 +1,8 @@
1
- require_relative "rows/regular"
2
- require_relative "rows/compact_planned"
3
- require_relative "rows/comment"
1
+ require_relative 'rows/blank'
2
+ require_relative 'rows/regular'
3
+ require_relative 'rows/compact_planned'
4
+ require_relative 'rows/custom_config'
5
+ require_relative 'rows/comment'
4
6
 
5
7
  module Reading
6
8
  module Parsing
@@ -43,14 +45,6 @@ module Reading
43
45
  #
44
46
  class Parser
45
47
  using Util::HashArrayDeepFetch
46
- using Util::StringRemove
47
-
48
- attr_reader :config
49
-
50
- # @param config [Hash] an entire config.
51
- def initialize(config)
52
- @config = config
53
- end
54
48
 
55
49
  # Parses a row string into a hash that mirrors the structure of the row.
56
50
  # @param string [String] a string containing a row of a CSV reading log.
@@ -58,7 +52,7 @@ module Reading
58
52
  def parse_row_to_intermediate_hash(string)
59
53
  columns = extract_columns(string)
60
54
 
61
- if config.fetch(:skip_compact_planned) && columns.has_key?(Rows::CompactPlanned::Head)
55
+ if Config.hash.fetch(:skip_compact_planned) && columns.has_key?(Rows::CompactPlanned::Head)
62
56
  return {}
63
57
  end
64
58
 
@@ -75,15 +69,20 @@ module Reading
75
69
  # @return [Hash{Class => String}] a hash whose keys are classes inheriting
76
70
  # Parsing::Rows::Column.
77
71
  def extract_columns(string)
78
- clean_string = string.dup.force_encoding(Encoding::UTF_8)
79
- column_strings = clean_string.split(config.fetch(:column_separator))
72
+ string = string.dup.force_encoding(Encoding::UTF_8)
73
+ column_strings = string.split(Config.hash.fetch(:column_separator))
80
74
 
81
- row_types = [Rows::Regular, Rows::CompactPlanned, Rows::Comment]
75
+ row_types = [Rows::Blank, Rows::Regular, Rows::CompactPlanned, Rows::CustomConfig, Rows::Comment]
82
76
  column_classes = row_types
83
- .find { |row_type| row_type.match?(string, config) }
77
+ .find { |row_type| row_type.match?(string) }
78
+ .tap { |row_type|
79
+ if row_type == Rows::CustomConfig
80
+ row_type.merge_custom_config!(string)
81
+ end
82
+ }
84
83
  .column_classes
85
- .filter { |column_class|
86
- config.fetch(:enabled_columns).include?(column_class.to_sym)
84
+ .select { |column_class|
85
+ Config.hash.fetch(:enabled_columns).include?(column_class.to_sym)
87
86
  }
88
87
 
89
88
  if !column_classes.count.zero? && column_strings.count > column_classes.count
@@ -123,7 +122,7 @@ module Reading
123
122
  # it doesn't contain any format emojis, return the same as above but
124
123
  # with an extra level of nesting (except when the parsed result is nil).
125
124
  if column_class.split_by_format? &&
126
- !column_string.match?(config.deep_fetch(:regex, :formats))
125
+ !column_string.match?(Config.hash.deep_fetch(:regex, :formats))
127
126
 
128
127
  parsed_column = parse_segments(column_class, column_string)
129
128
  # Wrap a non-empty value in an array so that e.g. a head without
@@ -136,18 +135,18 @@ module Reading
136
135
  # The rest is the complex case: if the column *can and is* split by format.
137
136
 
138
137
  # Each format plus the string after it.
139
- format_strings = column_string.split(config.deep_fetch(:regex, :formats_split))
138
+ format_strings = column_string.split(Config.hash.deep_fetch(:regex, :formats_split))
140
139
 
141
140
  # If there's a string before the first format, e.g. "DNF" in Head column.
142
- unless format_strings.first.match?(config.deep_fetch(:regex, :formats))
141
+ unless format_strings.first.match?(Config.hash.deep_fetch(:regex, :formats))
143
142
  before_formats = parse_segment(column_class, format_strings.shift, before_formats: true)
144
143
  end
145
144
 
146
145
  # Parse each format-plus-string into an array of segments.
147
146
  heads = format_strings.map { |string|
148
- format_emoji = string[config.deep_fetch(:regex, :formats)]
149
- string.remove!(format_emoji)
150
- format = config.fetch(:formats).key(format_emoji)
147
+ format_emoji = string[Config.hash.deep_fetch(:regex, :formats)]
148
+ string.sub!(format_emoji, '')
149
+ format = Config.hash.fetch(:formats).key(format_emoji)
151
150
 
152
151
  parse_segments(column_class, string)
153
152
  .merge(format: format)
@@ -252,7 +251,7 @@ module Reading
252
251
  # @return [Hash{Symbol => String}] e.g. { author: "Bram Stoker", title: "Dracula"}
253
252
  def parse_segment_with_regex(segment, regex)
254
253
  segment
255
- .tr(config.fetch(:ignored_characters), "")
254
+ .tr(Config.hash.fetch(:ignored_characters), "")
256
255
  .strip
257
256
  .match(regex)
258
257
  &.named_captures