reading 0.8.0 → 0.9.0

Files changed (38)
  1. checksums.yaml +4 -4
  2. data/bin/reading +80 -10
  3. data/lib/reading/config.rb +27 -5
  4. data/lib/reading/errors.rb +4 -1
  5. data/lib/reading/item/time_length.rb +60 -23
  6. data/lib/reading/item/view.rb +14 -19
  7. data/lib/reading/item.rb +321 -54
  8. data/lib/reading/parsing/attributes/attribute.rb +0 -7
  9. data/lib/reading/parsing/attributes/experiences/dates_and_head_transformer.rb +10 -11
  10. data/lib/reading/parsing/attributes/experiences/history_transformer.rb +27 -18
  11. data/lib/reading/parsing/attributes/experiences/spans_validator.rb +18 -19
  12. data/lib/reading/parsing/attributes/experiences.rb +5 -5
  13. data/lib/reading/parsing/attributes/shared.rb +13 -6
  14. data/lib/reading/parsing/attributes/variants.rb +9 -6
  15. data/lib/reading/parsing/csv.rb +38 -35
  16. data/lib/reading/parsing/parser.rb +23 -24
  17. data/lib/reading/parsing/rows/blank.rb +23 -0
  18. data/lib/reading/parsing/rows/comment.rb +6 -7
  19. data/lib/reading/parsing/rows/compact_planned.rb +9 -9
  20. data/lib/reading/parsing/rows/compact_planned_columns/head.rb +2 -2
  21. data/lib/reading/parsing/rows/custom_config.rb +42 -0
  22. data/lib/reading/parsing/rows/regular.rb +15 -14
  23. data/lib/reading/parsing/rows/regular_columns/length.rb +8 -8
  24. data/lib/reading/parsing/rows/regular_columns/sources.rb +15 -9
  25. data/lib/reading/parsing/transformer.rb +13 -17
  26. data/lib/reading/stats/filter.rb +738 -0
  27. data/lib/reading/stats/grouping.rb +243 -0
  28. data/lib/reading/stats/operation.rb +313 -0
  29. data/lib/reading/stats/query.rb +37 -0
  30. data/lib/reading/stats/terminal_result_formatters.rb +91 -0
  31. data/lib/reading/util/exclude.rb +12 -0
  32. data/lib/reading/util/hash_to_data.rb +2 -2
  33. data/lib/reading/version.rb +1 -1
  34. data/lib/reading.rb +36 -21
  35. metadata +10 -6
  36. data/bin/readingfile +0 -31
  37. data/lib/reading/util/string_remove.rb +0 -28
  38. data/lib/reading/util/string_truncate.rb +0 -22

data/lib/reading/parsing/attributes/experiences/spans_validator.rb

@@ -11,24 +11,23 @@ module Reading
  # Checks the dates in the given experiences hash, and raises an error
  # at the first invalid date found.
  # @param experiences [Array<Hash>] experience hashes.
- # @param config [Hash] an entire config.
  # @param history_column [Boolean] whether this validation is for
  # experiences from the History column.
  # @raise [InvalidDateError] if any date is invalid.
- def validate(experiences, config, history_column: false)
- if both_date_columns?(config)
+ def validate(experiences, history_column: false)
+ if both_date_columns?
  validate_number_of_start_dates_and_end_dates(experiences)
  end

- if start_dates_column?(config) || history_column
+ if start_dates_column? || history_column
  validate_start_dates_are_in_order(experiences)
  end

- if end_dates_column?(config) || history_column
+ if end_dates_column? || history_column
  validate_end_dates_are_in_order(experiences)
  end

- if both_date_columns?(config) || history_column
+ if both_date_columns? || history_column
  validate_experiences_of_same_variant_do_not_overlap(experiences)
  end

@@ -39,20 +38,20 @@ module Reading

  # Whether the Start Dates column is enabled.
  # @return [Boolean]
- def start_dates_column?(config)
- config.fetch(:enabled_columns).include?(:start_dates)
+ def start_dates_column?
+ Config.hash.fetch(:enabled_columns).include?(:start_dates)
  end

  # Whether the End Dates column is enabled.
  # @return [Boolean]
- def end_dates_column?(config)
- config.fetch(:enabled_columns).include?(:end_dates)
+ def end_dates_column?
+ Config.hash.fetch(:enabled_columns).include?(:end_dates)
  end

  # Whether both the Start Dates and End Dates columns are enabled.
  # @return [Boolean]
- def both_date_columns?(config)
- start_dates_column?(config) && end_dates_column?(config)
+ def both_date_columns?
+ start_dates_column? && end_dates_column?
  end

  # Raises an error if there are more end dates than start dates, or
@@ -60,7 +59,7 @@ module Reading
  # @raise [InvalidDateError]
  def validate_number_of_start_dates_and_end_dates(experiences)
  _both_dates, not_both_dates = experiences
- .filter { |exp| exp[:spans].first&.dig(:dates) }
+ .select { |exp| exp[:spans].first&.dig(:dates) }
  .map { |exp| [exp[:spans].first[:dates].begin, exp[:spans].last[:dates].end] }
  .partition { |start_date, end_date| start_date && end_date }

@@ -76,7 +75,7 @@ module Reading
  # @raise [InvalidDateError]
  def validate_start_dates_are_in_order(experiences)
  experiences
- .filter { |exp| exp[:spans].first&.dig(:dates) }
+ .select { |exp| exp[:spans].first&.dig(:dates) }
  .map { |exp| exp[:spans].first[:dates].begin }
  .each_cons(2) do |a, b|
  if (a.nil? && b.nil?) || (a && b && a > b )
@@ -89,8 +88,8 @@ module Reading
  # @raise [InvalidDateError]
  def validate_end_dates_are_in_order(experiences)
  experiences
- .filter { |exp| exp[:spans].first&.dig(:dates) }
- .map { |exp| exp[:spans].last[:dates].end }
+ .select { |exp| exp[:spans].first&.dig(:dates) }
+ .map { |exp| exp[:spans].last[:dates]&.end }
  .each_cons(2) do |a, b|
  if (a.nil? && b.nil?) || (a && b && a > b )
  raise InvalidDateError, "End dates are not in order"
@@ -104,7 +103,7 @@ module Reading
  experiences
  .group_by { |exp| exp[:variant_index] }
  .each do |_variant_index, exps|
- exps.filter { |exp| exp[:spans].any? }.each_cons(2) do |a, b|
+ exps.select { |exp| exp[:spans].any? }.each_cons(2) do |a, b|
  a_metaspan = a[:spans].first[:dates].begin..a[:spans].last[:dates].end
  b_metaspan = b[:spans].first[:dates].begin..b[:spans].last[:dates].end
  if a_metaspan.cover?(b_metaspan.begin || a_metaspan.begin || a_metaspan.end) ||
@@ -116,11 +115,11 @@ module Reading
  end

  # Raises an error if the spans within an experience are out of order
- # or if the spans overlap.
+ # or if the spans overlap. Spans with nil dates are not considered.
  # @raise [InvalidDateError]
  def validate_spans_are_in_order_and_not_overlapping(experiences)
  experiences
- .filter { |exp| exp[:spans].first&.dig(:dates) }
+ .select { |exp| exp[:spans].first&.dig(:dates) }
  .each do |exp|
  exp[:spans]
  .map { |span| span[:dates] }

data/lib/reading/parsing/attributes/experiences.rb

@@ -1,6 +1,6 @@
- require "date"
- require_relative "experiences/history_transformer"
- require_relative "experiences/dates_and_head_transformer"
+ require 'date'
+ require_relative 'experiences/history_transformer'
+ require_relative 'experiences/dates_and_head_transformer'

  module Reading
  module Parsing
@@ -16,10 +16,10 @@ module Reading
  # Config#default_config[:item][:template][:experiences]
  def transform_from_parsed(parsed_row, head_index)
  if !parsed_row[:history].blank?
- return HistoryTransformer.new(parsed_row, config).transform
+ return HistoryTransformer.new(parsed_row, head_index).transform
  end

- DatesAndHeadTransformer.new(parsed_row, head_index, config).transform
+ DatesAndHeadTransformer.new(parsed_row, head_index).transform
  end
  end
  end

data/lib/reading/parsing/attributes/shared.rb

@@ -1,8 +1,11 @@
  module Reading
  module Parsing
  module Attributes
- # Shared
+ # Sub-attributes that are shared across multiple attributes.
  module Shared
+ using Util::HashArrayDeepFetch
+ using Util::NumericToIIfWhole
+
  # Extracts the :progress sub-attribute (percent, pages, or time) from
  # the given hash.
  # @param hash [Hash] any parsed hash that contains progress.
@@ -10,7 +13,7 @@ module Reading
  def self.progress(hash)
  hash[:progress_percent]&.to_f&./(100) ||
  hash[:progress_pages]&.to_i ||
- hash[:progress_time]&.then { Item::TimeLength.parse _1 } ||
+ hash[:progress_time]&.then { Item::TimeLength.parse(_1) } ||
  (0 if hash[:progress_dnf]) ||
  (1.0 if hash[:progress_done]) ||
  nil
@@ -18,6 +21,8 @@ module Reading

  # Extracts the :length sub-attribute (pages or time) from the given hash.
  # @param hash [Hash] any parsed hash that contains length.
+ # @param format [Symbol] the item format, which affects length in cases
+ # where Config.hash[:speed][:format] is customized.
  # @param key_name [Symbol] the first part of the keys to be checked.
  # @param episodic [Boolean] whether to look for episodic (not total) length.
  # If false, returns nil if hash contains :each. If true, returns a
@@ -30,15 +35,15 @@ module Reading
  # This is useful for the History column, where that 1 hour can be used
  # as the default amount.
  # @return [Float, Integer, Item::TimeLength]
- def self.length(hash, key_name: :length, episodic: false, ignore_repetitions: false)
+ def self.length(hash, format:, key_name: :length, episodic: false, ignore_repetitions: false)
  return nil unless hash

  length = hash[:"#{key_name}_pages"]&.to_i ||
- hash[:"#{key_name}_time"]&.then { Item::TimeLength.parse _1 }
+ hash[:"#{key_name}_time"]&.then { Item::TimeLength.parse(_1) }

  return nil unless length

- if hash[:each]
+ if hash[:each] && !hash[:repetitions]
  # Length is calculated based on History column in this case.
  if episodic
  return length
@@ -54,7 +59,9 @@ module Reading
  return nil if episodic && !hash[:each]
  end

- length
+ speed = Config.hash.deep_fetch(:speed, :format)[format] || 1.0
+
+ (length / speed).to_i_if_whole
  end
  end
  end
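
To make the new format-aware length concrete: Shared.length now divides the parsed length by a per-format reading speed looked up at Config.hash[:speed][:format], falling back to 1.0. A minimal sketch of the effect, in which the :audiobook key, the 1.5 value, and the "6:00" input are illustrative assumptions (only the :speed/:format lookup and the division by speed appear in the diff above):

  require 'reading'

  # Hypothetical custom speed; with no speed configured, the fallback is 1.0.
  speed = Reading::Config.hash.dig(:speed, :format, :audiobook) || 1.0

  # Shared.length divides the parsed length by that speed, so a 6-hour
  # audiobook counts as 4 hours of length when the speed is 1.5.
  Reading::Item::TimeLength.parse('6:00') / speed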

data/lib/reading/parsing/attributes/variants.rb

@@ -4,6 +4,7 @@ module Reading
  # Transformer for the :variant item attribute.
  class Variants < Attribute
  using Util::HashArrayDeepFetch
+ using Util::NumericToIIfWhole

  # @param parsed_row [Hash] a parsed row (the intermediate hash).
  # @param head_index [Integer] current item's position in the Head column.
@@ -14,13 +15,15 @@ module Reading

  # || [{}] in case there is no Sources column.
  (parsed_row[:sources].presence || [{}])&.map { |variant|
+ format = variant[:format] || head[:format]
+
  {
- format: variant[:format] || head[:format],
+ format:,
  series: (series(head) + series(variant)).presence,
  sources: sources(variant) || sources(head),
  isbn: variant[:isbn] || variant[:asin],
- length: Attributes::Shared.length(variant) ||
- Attributes::Shared.length(parsed_row[:length]),
+ length: Attributes::Shared.length(variant, format:) ||
+ Attributes::Shared.length(parsed_row[:length], format:),
  extra_info: Array(head[:extra_info]) + Array(variant[:extra_info]),
  }.map { |k, v| [k, v || template.fetch(k)] }.to_h
  }&.compact&.presence
@@ -29,7 +32,7 @@ module Reading
  # A shortcut to the variant template.
  # @return [Hash]
  def template
- config.deep_fetch(:item, :template, :variants).first
+ Config.hash.deep_fetch(:item, :template, :variants).first
  end

  # The :series sub-attribute for the given parsed hash.
@@ -57,11 +60,11 @@ module Reading
  end

  # The name for the given URL string, according to
- # config[:source_names_from_urls], or nil.
+ # Config.hash[:source_names_from_urls], or nil.
  # @param url [String] a URL.
  # @return [String, nil]
  def url_name(url)
- config
+ Config.hash
  .fetch(:source_names_from_urls)
  .each do |url_part, name|
  if url.include?(url_part)

data/lib/reading/parsing/csv.rb

@@ -1,23 +1,12 @@
- # Used throughout, in other files.
- require_relative "../util/blank"
- require_relative "../util/string_remove"
- require_relative "../util/string_truncate"
- require_relative "../util/numeric_to_i_if_whole"
- require_relative "../util/hash_deep_merge"
- require_relative "../util/hash_array_deep_fetch"
- require_relative "../util/hash_compact_by_template"
- require_relative "../errors"
-
- # Used just here.
- require_relative "../config"
- require_relative "../item"
- require_relative "parser"
- require_relative "transformer"
+ require 'pastel'
+ require_relative '../item'
+ require_relative 'parser'
+ require_relative 'transformer'

  module Reading
  module Parsing
  #
- # Validates a path or stream (string, file, etc.) of a CSV reading log, then
+ # Validates a path or lines (string, file, etc.) of a CSV reading log, then
  # parses it into an array of Items.
  #
  # Parsing happens in two steps:
@@ -31,31 +20,35 @@ module Reading
  # inspired by the Parslet gem: https://kschiess.github.io/parslet/transform.html
  #
  class CSV
- private attr_reader :parser, :transformer, :hash_output, :item_view
+ private attr_reader :parser, :transformer, :hash_output, :item_view, :error_handler, :pastel

- # Validates a path or stream (string, file, etc.) of a CSV reading log,
+ # Validates a path or lines (string, file, etc.) of a CSV reading log,
  # builds the config, and initializes the parser and transformer.
- # @param path [String] path to the CSV file; used if no stream is given.
- # @param stream [Object] an object responding to #each_linewith CSV row(s);
+ # @param path [String] path to the CSV file; used if no lines are given.
+ # @param lines [Object] an object responding to #each_line with CSV row(s);
  # if nil, path is used instead.
- # @param config [Hash] a custom config which overrides the defaults,
+ # @param config [Hash, Config] a custom config which overrides the defaults,
  # e.g. { errors: { styling: :html } }
  # @param hash_output [Boolean] whether an array of raw Hashes should be
  # returned, without Items being created from them.
- # @param view [Class, nil, Boolean] the class that will be used to build
+ # @param item_view [Class, nil, Boolean] the class that will be used to build
  # each Item's view object, or nil/false if no view object should be built.
  # If you use a custom view class, the only requirement is that its
  # #initialize take an Item and a full config as arguments.
- def initialize(path = nil, stream: nil, config: {}, hash_output: false, item_view: Item::View)
- validate_path_or_stream(path, stream)
- full_config = Config.new(config).hash
+ # @param error_handler [Proc] if not provided, errors are raised.
+ def initialize(path: nil, lines: nil, config: nil, hash_output: false, item_view: Item::View, error_handler: nil)
+ validate_path_or_lines(path, lines)
+
+ Config.build(config) if config

  @path = path
- @stream = stream
+ @lines = lines
  @hash_output = hash_output
  @item_view = item_view
- @parser = Parser.new(full_config)
- @transformer = Transformer.new(full_config)
+ @parser = Parser.new
+ @transformer = Transformer.new
+ @error_handler = error_handler
+ @pastel = Pastel.new
  end

  # Parses and transforms the reading log into item data.
@@ -64,16 +57,26 @@ module Reading
  # structure to that Hash (with every inner Hash replaced by a Data for
  # dot access).
  def parse
- input = @stream || File.open(@path)
+ input = @lines || File.open(@path)
  items = []

  input.each_line do |line|
  begin
  intermediate = parser.parse_row_to_intermediate_hash(line)
+
  next if intermediate.empty? # When the row is blank or a comment.
+
  row_items = transformer.transform_intermediate_hash_to_item_hashes(intermediate)
  rescue Reading::Error => e
- raise e.class, "#{e.message} in the row \"#{line}\""
+ colored_e =
+ e.class.new("#{pastel.bright_red(e.message)} in the row #{pastel.bright_yellow(line.chomp)}")
+
+ if error_handler
+ error_handler.call(colored_e)
+ next
+ else
+ raise colored_e
+ end
  end

  items += row_items
@@ -90,11 +93,11 @@ module Reading

  private

- # Checks on the given stream and path (arguments to #initialize).
+ # Checks on the given lines and path (arguments to #initialize).
  # @raise [FileError] if the given path is invalid.
- # @raise [ArgumentError] if both stream and path are nil.
- def validate_path_or_stream(path, stream)
- if stream && stream.respond_to?(:each_line)
+ # @raise [ArgumentError] if both lines and path are nil.
+ def validate_path_or_lines(path, lines)
+ if lines && lines.respond_to?(:each_line)
  return true
  elsif path
  if !File.exist?(path)
@@ -104,7 +107,7 @@ module Reading
  end
  else
  raise ArgumentError,
- "Either a file path or a stream (string, file, etc.) must be provided."
+ "Provide either a file path or object implementing #each_line (String, File, etc.)."
  end
  end
  end
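
For orientation on the reworked constructor above (keyword arguments, lines: replacing stream:, and the new error_handler:), here is a minimal usage sketch; the file name and the handler body are illustrative, not part of the diff:

  require 'reading'

  csv = Reading::Parsing::CSV.new(
    path: 'reading.csv',                      # or lines: any object responding to #each_line
    error_handler: ->(e) { warn e.message },  # without a handler, parsing errors are raised
  )

  items = csv.parse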

data/lib/reading/parsing/parser.rb

@@ -1,6 +1,8 @@
- require_relative "rows/regular"
- require_relative "rows/compact_planned"
- require_relative "rows/comment"
+ require_relative 'rows/blank'
+ require_relative 'rows/regular'
+ require_relative 'rows/compact_planned'
+ require_relative 'rows/custom_config'
+ require_relative 'rows/comment'

  module Reading
  module Parsing
@@ -43,14 +45,6 @@ module Reading
  #
  class Parser
  using Util::HashArrayDeepFetch
- using Util::StringRemove
-
- attr_reader :config
-
- # @param config [Hash] an entire config.
- def initialize(config)
- @config = config
- end

  # Parses a row string into a hash that mirrors the structure of the row.
  # @param string [String] a string containing a row of a CSV reading log.
@@ -58,7 +52,7 @@ module Reading
  def parse_row_to_intermediate_hash(string)
  columns = extract_columns(string)

- if config.fetch(:skip_compact_planned) && columns.has_key?(Rows::CompactPlanned::Head)
+ if Config.hash.fetch(:skip_compact_planned) && columns.has_key?(Rows::CompactPlanned::Head)
  return {}
  end

@@ -76,14 +70,19 @@ module Reading
  # Parsing::Rows::Column.
  def extract_columns(string)
  string = string.dup.force_encoding(Encoding::UTF_8)
- column_strings = string.split(config.fetch(:column_separator))
+ column_strings = string.split(Config.hash.fetch(:column_separator))

- row_types = [Rows::Regular, Rows::CompactPlanned, Rows::Comment]
+ row_types = [Rows::Blank, Rows::Regular, Rows::CompactPlanned, Rows::CustomConfig, Rows::Comment]
  column_classes = row_types
- .find { |row_type| row_type.match?(string, config) }
+ .find { |row_type| row_type.match?(string) }
+ .tap { |row_type|
+ if row_type == Rows::CustomConfig
+ row_type.merge_custom_config!(string)
+ end
+ }
  .column_classes
- .filter { |column_class|
- config.fetch(:enabled_columns).include?(column_class.to_sym)
+ .select { |column_class|
+ Config.hash.fetch(:enabled_columns).include?(column_class.to_sym)
  }

  if !column_classes.count.zero? && column_strings.count > column_classes.count
@@ -123,7 +122,7 @@ module Reading
  # it doesn't contain any format emojis, return the same as above but
  # with an extra level of nesting (except when the parsed result is nil).
  if column_class.split_by_format? &&
- !column_string.match?(config.deep_fetch(:regex, :formats))
+ !column_string.match?(Config.hash.deep_fetch(:regex, :formats))

  parsed_column = parse_segments(column_class, column_string)
  # Wrap a non-empty value in an array so that e.g. a head without
@@ -136,18 +135,18 @@ module Reading
  # The rest is the complex case: if the column *can and is* split by format.

  # Each format plus the string after it.
- format_strings = column_string.split(config.deep_fetch(:regex, :formats_split))
+ format_strings = column_string.split(Config.hash.deep_fetch(:regex, :formats_split))

  # If there's a string before the first format, e.g. "DNF" in Head column.
- unless format_strings.first.match?(config.deep_fetch(:regex, :formats))
+ unless format_strings.first.match?(Config.hash.deep_fetch(:regex, :formats))
  before_formats = parse_segment(column_class, format_strings.shift, before_formats: true)
  end

  # Parse each format-plus-string into an array of segments.
  heads = format_strings.map { |string|
- format_emoji = string[config.deep_fetch(:regex, :formats)]
- string.remove!(format_emoji)
- format = config.fetch(:formats).key(format_emoji)
+ format_emoji = string[Config.hash.deep_fetch(:regex, :formats)]
+ string.sub!(format_emoji, '')
+ format = Config.hash.fetch(:formats).key(format_emoji)

  parse_segments(column_class, string)
  .merge(format: format)
@@ -252,7 +251,7 @@ module Reading
  # @return [Hash{Symbol => String}] e.g. { author: "Bram Stoker", title: "Dracula"}
  def parse_segment_with_regex(segment, regex)
  segment
- .tr(config.fetch(:ignored_characters), "")
+ .tr(Config.hash.fetch(:ignored_characters), "")
  .strip
  .match(regex)
  &.named_captures

data/lib/reading/parsing/rows/blank.rb

@@ -0,0 +1,23 @@
+ module Reading
+ module Parsing
+ module Rows
+ # A row that is a blank line.
+ module Blank
+ using Util::HashArrayDeepFetch
+
+ # No columns.
+ # @return [Array]
+ def self.column_classes
+ []
+ end
+
+ # Is a blank line.
+ # @param row_string [String]
+ # @return [Boolean]
+ def self.match?(row_string)
+ row_string == "\n"
+ end
+ end
+ end
+ end
+ end

data/lib/reading/parsing/rows/comment.rb

@@ -5,20 +5,19 @@ module Reading
  module Comment
  using Util::HashArrayDeepFetch

- # No columns; comments are parsed as if the row were blank.
+ # No columns; parsed as if the row were blank.
  # @return [Array]
  def self.column_classes
  []
  end

- # Starts with a comment character and does not include any format emojis.
- # (Commented rows that DO include format emojis are matched as compact
- # planned rows.)
+ # Starts with a comment character. Note: this must be called *after*
+ # calling ::match? on Rows::CompactPlanned and Rows::CustomConfig,
+ # because those check for starting with a comment character too.
  # @param row_string [String]
- # @param config [Hash]
  # @return [Boolean]
- def self.match?(row_string, config)
- row_string.lstrip.start_with?(config.fetch(:comment_character))
+ def self.match?(row_string)
+ row_string.lstrip.start_with?(Config.hash.fetch(:comment_character))
  end
  end
  end

data/lib/reading/parsing/rows/compact_planned.rb

@@ -1,6 +1,7 @@
- require_relative "column"
- require_relative "compact_planned_columns/head"
- require_relative "regular_columns/sources"
+ require_relative 'column'
+ require_relative 'compact_planned_columns/head'
+ require_relative 'regular_columns/sources'
+ require_relative 'regular_columns/length'

  module Reading
  module Parsing
@@ -12,17 +13,16 @@ module Reading
  # The columns that are possible in this type of row.
  # @return [Array<Class>]
  def self.column_classes
- [CompactPlanned::Head, Regular::Sources]
+ [CompactPlanned::Head, Regular::Sources, Regular::Length]
  end

  # Starts with a comment character and includes one or more format emojis.
  # @param row_string [String]
- # @param config [Hash]
  # @return [Boolean]
- def self.match?(row_string, config)
- row_string.lstrip.start_with?(config.fetch(:comment_character)) &&
- row_string.match?(config.deep_fetch(:regex, :formats)) &&
- row_string.count(config.fetch(:column_separator)) <= column_classes.count - 1
+ def self.match?(row_string)
+ row_string.lstrip.start_with?(Config.hash.fetch(:comment_character)) &&
+ row_string.match?(Config.hash.deep_fetch(:regex, :formats)) &&
+ row_string.count(Config.hash.fetch(:column_separator)) <= column_classes.count - 1
  end
  end
  end

data/lib/reading/parsing/rows/compact_planned_columns/head.rb

@@ -15,7 +15,7 @@ module Reading
  \\ # comment character
  \s*
  (
- (?<genres>[^a-z]+)?
+ (?<genres>[^a-z@]+)?
  \s*
  (?<sources>@.+)?
  \s*:
@@ -49,7 +49,7 @@ module Reading
  )?
  (?<title>[^@]+)
  (?<sources>@.+)?
- \z}x if segment_index.zero?),
+ \z}x if segment_index.zero?),
  *Column::SHARED_REGEXES[:series_and_extra_info],
  ].compact
  end

data/lib/reading/parsing/rows/custom_config.rb

@@ -0,0 +1,42 @@
+ module Reading
+ module Parsing
+ module Rows
+ # A row that declares custom config.
+ module CustomConfig
+ using Util::HashArrayDeepFetch
+
+ # No columns; parsed as if the row were blank.
+ # @return [Array]
+ def self.column_classes
+ []
+ end
+
+ # Starts with a comment character and opening curly brace, and ends with
+ # a closing curly brace.
+ # @param row_string [String]
+ # @return [Boolean]
+ def self.match?(row_string)
+ row_string.match?(
+ %r{\A
+ \s*
+ #{Regexp.escape(Config.hash.fetch(:comment_character))}
+ \s*
+ \{.+\}
+ \s*
+ \z}x
+ )
+ end
+
+ # Adds this row's custom config to the singleton config.
+ # @param row_string [String]
+ # @param config [Hash] an entire config.
+ def self.merge_custom_config!(row_string)
+ stripped_row = row_string.strip.delete_prefix(Config.hash.fetch(:comment_character))
+ custom_config = eval(stripped_row)
+
+ Config.hash.merge!(custom_config)
+ end
+ end
+ end
+ end
+ end
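
In other words, a custom config row is a commented line holding a Ruby hash literal: merge_custom_config! strips the comment character, eval's the remainder, and merges the result into the singleton Config.hash. A hypothetical reading-log line, assuming the default backslash comment character and the :skip_compact_planned key referenced in parser.rb above:

  \{skip_compact_planned: true}

Because rows are parsed line by line, the merged setting applies to the rows that follow it in the same file.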

data/lib/reading/parsing/rows/regular.rb

@@ -1,13 +1,13 @@
- require_relative "column"
- require_relative "regular_columns/rating"
- require_relative "regular_columns/head"
- require_relative "regular_columns/sources"
- require_relative "regular_columns/start_dates"
- require_relative "regular_columns/end_dates"
- require_relative "regular_columns/genres"
- require_relative "regular_columns/length"
- require_relative "regular_columns/notes"
- require_relative "regular_columns/history"
+ require_relative 'column'
+ require_relative 'regular_columns/rating'
+ require_relative 'regular_columns/head'
+ require_relative 'regular_columns/sources'
+ require_relative 'regular_columns/start_dates'
+ require_relative 'regular_columns/end_dates'
+ require_relative 'regular_columns/genres'
+ require_relative 'regular_columns/length'
+ require_relative 'regular_columns/notes'
+ require_relative 'regular_columns/history'

  module Reading
  module Parsing
@@ -20,12 +20,13 @@ module Reading
  [Rating, Head, Sources, StartDates, EndDates, Genres, Length, Notes, History]
  end

- # Does not start with a comment character.
+ # Does not start with a comment character. Note: this must be called
+ # *after* calling ::match? on Rows::Blank, because that one catches
+ # blank lines.
  # @param row_string [String]
- # @param config [Hash]
  # @return [Boolean]
- def self.match?(row_string, config)
- !row_string.lstrip.start_with?(config.fetch(:comment_character))
+ def self.match?(row_string)
+ !row_string.lstrip.start_with?(Config.hash.fetch(:comment_character))
  end
  end
  end