reading 0.6.1 → 0.7.0

Files changed (60)
  1. checksums.yaml +4 -4
  2. data/bin/reading +5 -5
  3. data/bin/readingfile +31 -0
  4. data/lib/reading/config.rb +115 -149
  5. data/lib/reading/errors.rb +10 -66
  6. data/lib/reading/item/time_length.rb +138 -0
  7. data/lib/reading/parsing/attributes/attribute.rb +26 -0
  8. data/lib/reading/parsing/attributes/author.rb +15 -0
  9. data/lib/reading/parsing/attributes/experiences/dates_and_head_transformer.rb +106 -0
  10. data/lib/reading/parsing/attributes/experiences/history_transformer.rb +452 -0
  11. data/lib/reading/parsing/attributes/experiences/spans_validator.rb +149 -0
  12. data/lib/reading/parsing/attributes/experiences.rb +27 -0
  13. data/lib/reading/parsing/attributes/genres.rb +16 -0
  14. data/lib/reading/parsing/attributes/notes.rb +22 -0
  15. data/lib/reading/parsing/attributes/rating.rb +17 -0
  16. data/lib/reading/parsing/attributes/shared.rb +62 -0
  17. data/lib/reading/parsing/attributes/title.rb +21 -0
  18. data/lib/reading/parsing/attributes/variants.rb +77 -0
  19. data/lib/reading/parsing/csv.rb +101 -0
  20. data/lib/reading/parsing/parser.rb +292 -0
  21. data/lib/reading/parsing/rows/column.rb +131 -0
  22. data/lib/reading/parsing/rows/comment.rb +26 -0
  23. data/lib/reading/parsing/rows/compact_planned.rb +30 -0
  24. data/lib/reading/parsing/rows/compact_planned_columns/head.rb +60 -0
  25. data/lib/reading/parsing/rows/regular.rb +33 -0
  26. data/lib/reading/parsing/rows/regular_columns/end_dates.rb +20 -0
  27. data/lib/reading/parsing/rows/regular_columns/genres.rb +20 -0
  28. data/lib/reading/parsing/rows/regular_columns/head.rb +45 -0
  29. data/lib/reading/parsing/rows/regular_columns/history.rb +143 -0
  30. data/lib/reading/parsing/rows/regular_columns/length.rb +35 -0
  31. data/lib/reading/parsing/rows/regular_columns/notes.rb +32 -0
  32. data/lib/reading/parsing/rows/regular_columns/rating.rb +15 -0
  33. data/lib/reading/parsing/rows/regular_columns/sources.rb +94 -0
  34. data/lib/reading/parsing/rows/regular_columns/start_dates.rb +35 -0
  35. data/lib/reading/parsing/transformer.rb +70 -0
  36. data/lib/reading/util/hash_compact_by_template.rb +1 -0
  37. data/lib/reading/util/hash_deep_merge.rb +1 -1
  38. data/lib/reading/util/hash_to_struct.rb +1 -0
  39. data/lib/reading/util/numeric_to_i_if_whole.rb +12 -0
  40. data/lib/reading/util/string_truncate.rb +13 -4
  41. data/lib/reading/version.rb +1 -1
  42. data/lib/reading.rb +18 -0
  43. metadata +58 -41
  44. data/lib/reading/attribute/all_attributes.rb +0 -83
  45. data/lib/reading/attribute/attribute.rb +0 -25
  46. data/lib/reading/attribute/experiences/dates_validator.rb +0 -94
  47. data/lib/reading/attribute/experiences/experiences_attribute.rb +0 -74
  48. data/lib/reading/attribute/experiences/progress_subattribute.rb +0 -48
  49. data/lib/reading/attribute/experiences/spans_subattribute.rb +0 -82
  50. data/lib/reading/attribute/variants/extra_info_subattribute.rb +0 -44
  51. data/lib/reading/attribute/variants/length_subattribute.rb +0 -45
  52. data/lib/reading/attribute/variants/series_subattribute.rb +0 -57
  53. data/lib/reading/attribute/variants/sources_subattribute.rb +0 -78
  54. data/lib/reading/attribute/variants/variants_attribute.rb +0 -69
  55. data/lib/reading/csv.rb +0 -76
  56. data/lib/reading/line.rb +0 -23
  57. data/lib/reading/row/blank_row.rb +0 -23
  58. data/lib/reading/row/compact_planned_row.rb +0 -130
  59. data/lib/reading/row/regular_row.rb +0 -99
  60. data/lib/reading/row/row.rb +0 -88
data/lib/reading/attribute/variants/length_subattribute.rb DELETED
@@ -1,45 +0,0 @@
- module Reading
-   class Row
-     class LengthSubattribute
-       using Util::HashArrayDeepFetch
-
-       private attr_reader :item_head, :bare_variant, :columns, :config
-
-       # @param bare_variant [String] the variant string before series / extra info.
-       # @param columns [Array<String>]
-       # @param config [Hash]
-       def initialize(bare_variant:, columns:, config:)
-         @bare_variant = bare_variant
-         @columns = columns
-         @config = config
-       end
-
-       def parse
-         in_variant = length_in(
-           bare_variant,
-           time_regex: config.deep_fetch(:csv, :regex, :time_length_in_variant),
-           pages_regex: config.deep_fetch(:csv, :regex, :pages_length_in_variant),
-         )
-         in_length = length_in(
-           columns[:length],
-           time_regex: config.deep_fetch(:csv, :regex, :time_length),
-           pages_regex: config.deep_fetch(:csv, :regex, :pages_length),
-         )
-
-         in_variant || in_length ||
-           (raise InvalidLengthError, "Missing length" unless columns[:length].blank?)
-       end
-
-       private
-
-       def length_in(str, time_regex:, pages_regex:)
-         return nil if str.blank?
-
-         time_length = str.strip.match(time_regex)&.captures&.first
-         return time_length unless time_length.nil?
-
-         str.strip.match(pages_regex)&.captures&.first&.to_i
-       end
-     end
-   end
- end
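
A note on the deleted length_in helper above: it prefers a time match (returned as the string capture) and falls back to a page count (returned as an integer). A self-contained sketch with assumed regexes, since the real patterns live in the gem's config:

# Assumed stand-ins for config.deep_fetch(:csv, :regex, :time_length) and :pages_length.
TIME_LENGTH  = /(\d+:\d\d)/
PAGES_LENGTH = /(\d+)p?/

def length_in(str, time_regex: TIME_LENGTH, pages_regex: PAGES_LENGTH)
  return nil if str.nil? || str.strip.empty? # in the gem this is String#blank? from util/blank

  time_length = str.strip.match(time_regex)&.captures&.first
  return time_length unless time_length.nil?

  str.strip.match(pages_regex)&.captures&.first&.to_i
end

length_in("10:30") # => "10:30" (a time length, kept as a string)
length_in("380p")  # => 380    (a page count, converted to an integer)
length_in(nil)     # => nil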
data/lib/reading/attribute/variants/series_subattribute.rb DELETED
@@ -1,57 +0,0 @@
- module Reading
-   class Row
-     class SeriesSubattribute
-       using Util::HashArrayDeepFetch
-
-       private attr_reader :item_head, :variant_with_extras, :config
-
-       # @param item_head [String] see Row#item_heads for a definition.
-       # @param variant_with_extras [String] the full variant string.
-       # @param config [Hash]
-       def initialize(item_head:, variant_with_extras: nil, config:)
-         @item_head = item_head
-         @variant_with_extras = variant_with_extras
-         @config = config
-       end
-
-       def parse
-         (
-           Array(series(item_head)) +
-           Array(series(variant_with_extras))
-         ).presence
-       end
-
-       def parse_head
-         series(item_head)
-       end
-
-       private
-
-       def template
-         config.deep_fetch(:item, :template, :variants, 0, :series).first
-       end
-
-       def series(str)
-         separated = str
-           .split(config.deep_fetch(:csv, :long_separator))
-           .map(&:strip)
-           .map(&:presence)
-           .compact
-
-         separated.delete_at(0) # everything before the series/extra info
-
-         separated.map { |str|
-           volume = str.match(config.deep_fetch(:csv, :regex, :series_volume))
-           prefix = "#{config.deep_fetch(:csv, :series_prefix)} "
-
-           if volume || str.start_with?(prefix)
-             {
-               name: str.delete_suffix(volume.to_s).delete_prefix(prefix) || template[:name],
-               volume: volume&.captures&.first&.to_i || template[:volume],
-             }
-           end
-         }.compact.presence
-       end
-     end
-   end
- end
data/lib/reading/attribute/variants/sources_subattribute.rb DELETED
@@ -1,78 +0,0 @@
- module Reading
-   class Row
-     class SourcesSubattribute
-       using Util::StringRemove
-       using Util::HashArrayDeepFetch
-
-       private attr_reader :item_head, :bare_variant, :config
-
-       # @param bare_variant [String] the variant string before series / extra info.
-       # @param config [Hash]
-       def initialize(bare_variant:, config:)
-         @bare_variant = bare_variant
-         @config = config
-       end
-
-       def parse
-         urls = sources_urls(bare_variant).map { |url|
-           {
-             name: url_name(url) || template.deep_fetch(:sources, 0, :name),
-             url: url,
-           }
-         }
-
-         names = sources_names(bare_variant).map { |name|
-           {
-             name: name,
-             url: template.deep_fetch(:sources, 0, :url),
-           }
-         }
-
-         (urls + names).presence
-       end
-
-       private
-
-       def template
-         @template ||= config.deep_fetch(:item, :template, :variants).first
-       end
-
-       def sources_urls(str)
-         str.scan(config.deep_fetch(:csv, :regex, :url))
-       end
-
-       # Turns everything that is not a source name (ISBN, source URL, length) into
-       # a separator, then splits by that separator and removes empty elements
-       # and format emojis. What's left is source names.
-       def sources_names(str)
-         not_names = [:isbn, :url, :time_length_in_variant, :pages_length_in_variant]
-         names_and_separators = str
-
-         not_names.each do |regex_type|
-           names_and_separators = names_and_separators.gsub(
-             config.deep_fetch(:csv, :regex, regex_type),
-             config.deep_fetch(:csv, :separator),
-           )
-         end
-
-         names_and_separators
-           .split(config.deep_fetch(:csv, :separator))
-           .map { |name| name.remove(/\A\s*#{config.deep_fetch(:csv, :regex, :formats)}\s*/) }
-           .map(&:strip)
-           .reject(&:empty?)
-       end
-
-       def url_name(url)
-         config
-           .deep_fetch(:item, :sources, :names_from_urls)
-           .each do |url_part, name|
-             if url.include?(url_part)
-               return name
-             end
-           end
-
-         config.deep_fetch(:item, :sources, :default_name_for_url)
-       end
-     end
-   end
- end
data/lib/reading/attribute/variants/variants_attribute.rb DELETED
@@ -1,69 +0,0 @@
- require_relative "series_subattribute"
- require_relative "sources_subattribute"
- require_relative "length_subattribute"
- require_relative "extra_info_subattribute"
-
- module Reading
-   class Row
-     class VariantsAttribute < Attribute
-       using Util::HashArrayDeepFetch
-
-       def parse
-         sources_str = columns[:sources]&.presence || " "
-
-         format_as_separator = config.deep_fetch(:csv, :regex, :formats_split)
-
-         sources_str.split(format_as_separator).map { |variant_with_extras|
-           # without extra info or series
-           bare_variant = variant_with_extras
-             .split(config.deep_fetch(:csv, :long_separator))
-             .first
-
-           series_attr = SeriesSubattribute.new(item_head:, variant_with_extras:, config:)
-           sources_attr = SourcesSubattribute.new(bare_variant:, config:)
-           # Length, despite not being very complex, is still split out into a
-           # subattribute because it needs to be accessible to
-           # ExperiencesAttribute (more specifically SpansSubattribute) which
-           # uses length as a default value for amount.
-           length_attr = LengthSubattribute.new(bare_variant:, columns:, config:)
-           extra_info_attr = ExtraInfoSubattribute.new(item_head:, variant_with_extras:, config:)
-
-           variant =
-             {
-               format: format(bare_variant) || format(item_head) || template.fetch(:format),
-               series: series_attr.parse || template.fetch(:series),
-               sources: sources_attr.parse || template.fetch(:sources),
-               isbn: isbn(bare_variant) || template.fetch(:isbn),
-               length: length_attr.parse || template.fetch(:length),
-               extra_info: extra_info_attr.parse || template.fetch(:extra_info)
-             }
-
-           if variant != template
-             variant
-           else
-             nil
-           end
-         }.compact.presence
-       end
-
-       private
-
-       def template
-         @template ||= config.deep_fetch(:item, :template, :variants).first
-       end
-
-       def format(str)
-         emoji = str.match(/^#{config.deep_fetch(:csv, :regex, :formats)}/).to_s
-         config.deep_fetch(:item, :formats).key(emoji)
-       end
-
-       def isbn(str)
-         isbns = str.scan(config.deep_fetch(:csv, :regex, :isbn))
-         if isbns.count > 1
-           raise InvalidSourceError, "Only one ISBN/ASIN is allowed per item variant"
-         end
-         isbns[0]&.to_s
-       end
-     end
-   end
- end
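
The comment inside VariantsAttribute#parse explains why length gets its own subattribute. For orientation, this is the shape of one parsed variant as assembled above; the keys are taken from the code, the sample values are invented:

# Hypothetical output of VariantsAttribute#parse for a single variant.
{
  format: :ebook,                                  # looked up from the format emoji
  series: [{ name: "Some Series", volume: 2 }],    # from SeriesSubattribute
  sources: [{ name: "Little Library",              # from SourcesSubattribute
              url: "https://example.com/book" }],
  isbn: "B00GVG01HE",                              # ISBN or ASIN
  length: 380,                                     # pages, or a time string, from LengthSubattribute
  extra_info: ["unabridged"],
}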
data/lib/reading/csv.rb DELETED
@@ -1,76 +0,0 @@
- # Used throughout, in other files.
- require_relative "util/blank"
- require_relative "util/string_remove"
- require_relative "util/string_truncate"
- require_relative "util/hash_to_struct"
- require_relative "util/hash_deep_merge"
- require_relative "util/hash_array_deep_fetch"
- require_relative "util/hash_compact_by_template"
- require_relative "errors"
-
- # Used just here.
- require_relative "config"
- require_relative "line"
-
- module Reading
-   class CSV
-     using Util::HashDeepMerge
-     using Util::HashArrayDeepFetch
-     using Util::HashToStruct
-
-     attr_reader :config
-
-     # @param feed [Object] the input source, which must respond to #each_line;
-     # if nil, the file at the given path is used.
-     # @param path [String] the path of the source file.
-     # @param config [Hash] a custom config which overrides the defaults,
-     # e.g. { errors: { styling: :html } }
-     def initialize(feed = nil, path: nil, config: {})
-       validate_feed_or_path(feed, path)
-
-       @feed = feed
-       @path = path
-       @config ||= Config.new(config).hash
-     end
-
-     # Parses a CSV reading log into item data (an array of Structs).
-     # For what the Structs look like, see the Hash at @default_config[:item][:template]
-     # in config.rb. The Structs are identical in structure to that Hash (with
-     # every inner Hash replaced with a Struct).
-     # @return [Array<Struct>] an array of Structs like the template in config.rb
-     def parse
-       feed = @feed || File.open(@path)
-       items = []
-
-       feed.each_line do |string|
-         line = Line.new(string, self)
-         row = line.to_row
-
-         items += row.parse
-       end
-
-       items.map(&:to_struct)
-     ensure
-       feed&.close if feed.respond_to?(:close)
-     end
-
-     private
-
-     # Checks on the given feed and path (arguments to #initialize).
-     # @raise [FileError] if the given path is invalid.
-     # @raise [ArgumentError] if both feed and path are nil.
-     def validate_feed_or_path(feed, path)
-       return true if feed
-
-       if path
-         if !File.exist?(path)
-           raise FileError, "File not found! #{path}"
-         elsif File.directory?(path)
-           raise FileError, "The reading log must be a file, but the path given is a directory: #{path}"
-         end
-       else
-         raise ArgumentError, "Either a feed (String, File, etc.) or a file path must be provided."
-       end
-     end
-   end
- end
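
Going by the doc comments in the removed Reading::CSV class above, it was constructed from either a feed object or a file path plus an optional config override, and #parse returned an array of Structs. A rough usage sketch of that old 0.6.x entry point (the require path and file path are assumptions):

require "reading/csv" # assumed require path, matching the deleted file's location under lib/

# Parse a reading log file into an array of Structs (one per item).
csv = Reading::CSV.new(
  path: "/home/me/reading.csv",           # hypothetical path
  config: { errors: { styling: :html } }, # override example from the doc comment above
)
items = csv.parse

# Or pass any feed that responds to #each_line, such as a String or an open File.
items = Reading::CSV.new(File.read("/home/me/reading.csv")).parse

items.first.title # each item is a Struct shaped like the config template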
data/lib/reading/line.rb DELETED
@@ -1,23 +0,0 @@
- require_relative "row/compact_planned_row"
- require_relative "row/blank_row"
- require_relative "row/regular_row"
-
- module Reading
-   # A bridge between rows as strings and as parsable Rows, used whenever the
-   # context of the line in the CSV is needed, e.g. converting a line to a Row,
-   # or adding a CSV line to a Row parsing error.
-   class Line
-     attr_reader :string, :csv
-
-     def initialize(string, csv)
-       @string = string.dup.force_encoding(Encoding::UTF_8).strip
-       @csv = csv
-     end
-
-     def to_row
-       return CompactPlannedRow.new(self) if CompactPlannedRow.match?(self)
-       return BlankRow.new(self) if BlankRow.match?(self)
-       RegularRow.new(self)
-     end
-   end
- end
data/lib/reading/row/blank_row.rb DELETED
@@ -1,23 +0,0 @@
- require_relative "row"
-
- module Reading
-   # An empty or commented-out row. A null object which returns an empty array.
-   class BlankRow < Row
-     using Util::HashArrayDeepFetch
-
-     # Whether the given CSV line is a blank row.
-     # @param line [Reading::Line]
-     # @return [Boolean]
-     def self.match?(line)
-       comment_char = line.csv.config.deep_fetch(:csv, :comment_character)
-
-       line.string.strip.empty? ||
-         line.string.strip.start_with?(comment_char)
-     end
-
-     # Overrides Row#parse.
-     def parse
-       []
-     end
-   end
- end
data/lib/reading/row/compact_planned_row.rb DELETED
@@ -1,130 +0,0 @@
- require_relative "row"
- require "debug"
-
- module Reading
-   # Parses a row of compactly listed planned items into an array of hashes of
-   # item data.
-   class CompactPlannedRow < Row
-     using Util::StringRemove
-     using Util::HashDeepMerge
-     using Util::HashArrayDeepFetch
-
-     # Whether the given CSV line is a compact planned row.
-     # @param line [Reading::Line]
-     # @return [Boolean]
-     def self.match?(line)
-       comment_char = line.csv.config.deep_fetch(:csv, :comment_character)
-
-       line.string.strip.start_with?(comment_char) &&
-         line.string.match?(line.csv.config.deep_fetch(:csv, :regex, :compact_planned_row_start))
-     end
-
-     private
-
-     def skip?
-       config.deep_fetch(:csv, :skip_compact_planned)
-     end
-
-     def before_parse
-       to_ignore = config.deep_fetch(:csv, :regex, :compact_planned_ignored_chars)
-       start_regex = config.deep_fetch(:csv, :regex, :compact_planned_row_start)
-
-       string_without_ignored_chars = string.remove_all(to_ignore)
-       start = string_without_ignored_chars.match(start_regex)
-
-       @genres = Array(start[:genres]&.downcase&.strip&.split(",")&.map(&:strip))
-       @sources = sources(start[:sources])
-       @row_without_genre = string_without_ignored_chars.remove(start.to_s)
-     end
-
-     def string_to_be_split_by_format_emojis
-       @row_without_genre
-     end
-
-     def item_hash(item_head)
-       item_match = item_head.match(config.deep_fetch(:csv, :regex, :compact_planned_item))
-       unless item_match
-         raise InvalidHeadError, "Title missing after #{item_head} in compact planned row"
-       end
-
-       author = AuthorAttribute.new(item_head: item_match[:author_title], config:).parse
-
-       begin
-         title = TitleAttribute.new(item_head: item_match[:author_title], config:).parse
-       rescue InvalidHeadError
-         raise InvalidHeadError, "Title missing after #{item_head} in compact planned row"
-       end
-
-       if item_match[:sources_column]
-         if item_match[:sources_column].include?(config.deep_fetch(:csv, :column_separator))
-           raise TooManyColumnsError, "Too many columns (only Sources allowed) " \
-             "after #{item_head} in compact planned row"
-         end
-
-         variants_attr = VariantsAttribute.new(
-           item_head: item_match[:format_emoji] + item_match[:author_title],
-           columns: { sources: item_match[:sources_column], length: nil },
-           config:,
-         )
-         variants = variants_attr.parse
-       else
-         variants = [parse_variant(item_match)]
-       end
-
-       template.deep_merge(
-         author: author || template.fetch(:author),
-         title: title,
-         genres: @genres.presence || template.fetch(:genres),
-         variants:,
-       )
-     end
-
-     def template
-       @template ||= config.deep_fetch(:item, :template)
-     end
-
-     def parse_variant(item_match)
-       item_head = item_match[:format_emoji] + item_match[:author_title]
-       series_attr = SeriesSubattribute.new(item_head:, config:)
-       extra_info_attr = ExtraInfoSubattribute.new(item_head:, config:)
-       sources = (@sources + sources(item_match[:sources])).uniq.presence
-
-       {
-         format: format(item_match[:format_emoji]),
-         series: series_attr.parse_head || template.deep_fetch(:variants, 0, :series),
-         sources: sources || template.deep_fetch(:variants, 0, :sources),
-         isbn: template.deep_fetch(:variants, 0, :isbn),
-         length: template.deep_fetch(:variants, 0, :length),
-         extra_info: extra_info_attr.parse_head || template.deep_fetch(:variants, 0, :extra_info),
-       }
-     end
-
-     def format(format_emoji)
-       config.deep_fetch(:item, :formats).key(format_emoji)
-     end
-
-     def sources(sources_str)
-       return [] if sources_str.nil?
-
-       sources_str
-         .split(config.deep_fetch(:csv, :compact_planned_source_prefix))
-         .map { |source| source.remove(/\s*,\s*/) }
-         .map(&:strip)
-         .reject(&:empty?)
-         .map { |source_name|
-           if valid_url?(source_name)
-             source_name = source_name.chop if source_name.chars.last == "/"
-             { name: config.deep_fetch(:item, :sources, :default_name_for_url),
-               url: source_name }
-           else
-             { name: source_name,
-               url: nil }
-           end
-         }
-     end
-
-     def valid_url?(str)
-       str&.match?(/http[^\s,]+/)
-     end
-   end
- end
data/lib/reading/row/regular_row.rb DELETED
@@ -1,99 +0,0 @@
- require_relative "row"
- require_relative "../attribute/all_attributes"
-
- module Reading
-   # Parses a normal CSV row into an array of hashes of item data. Typically
-   # a normal row describes one item and so it's parsed into an array containing
-   # a single hash, but it's also possible for a row to describe multiple items.
-   class RegularRow < Row
-     using Util::HashArrayDeepFetch
-
-     private attr_reader :columns, :attribute_classes
-
-     private
-
-     def after_initialize
-       set_attribute_classes
-     end
-
-     def before_parse
-       set_columns
-       ensure_head_column_present
-     end
-
-     def string_to_be_split_by_format_emojis
-       columns[:head]
-     end
-
-     def set_attribute_classes
-       @attribute_classes ||= config.deep_fetch(:item, :template).map { |attribute_name, _default|
-         attribute_name_camelcase = attribute_name.to_s.split("_").map(&:capitalize).join
-         attribute_class_name = "#{attribute_name_camelcase}Attribute"
-         attribute_class = self.class.const_get(attribute_class_name)
-
-         [attribute_name, attribute_class]
-       }.to_h
-         .merge(custom_attribute_classes)
-     end
-
-     def custom_attribute_classes
-       numeric = custom_attribute_classes_of_type(:numeric) do |value|
-         Float(value, exception: false)
-       end
-
-       text = custom_attribute_classes_of_type(:text) do |value|
-         value
-       end
-
-       (numeric + text).to_h
-     end
-
-     def custom_attribute_classes_of_type(type, &process_value)
-       config.deep_fetch(:csv, :"custom_#{type}_columns").map { |attribute, _default_value|
-         custom_class = Class.new(Attribute)
-
-         custom_class.define_method(:parse) do
-           value = columns[attribute.to_sym]&.strip&.presence
-           process_value.call(value)
-         end
-
-         [attribute.to_sym, custom_class]
-       }
-     end
-
-     def set_columns
-       column_names = config.deep_fetch(:csv, :enabled_columns) +
-         config.deep_fetch(:csv, :custom_numeric_columns).keys +
-         config.deep_fetch(:csv, :custom_text_columns).keys
-
-       columns_count = string.count(config.deep_fetch(:csv, :column_separator))
-       if columns_count >= column_names.count
-         raise TooManyColumnsError, "Too many columns"
-       end
-
-       column_contents = string.split(config.deep_fetch(:csv, :column_separator))
-
-       @columns = column_names.zip(column_contents).to_h
-     end
-
-     def ensure_head_column_present
-       if columns[:head].nil? || columns[:head].strip.empty?
-         raise InvalidHeadError, "The Head column must not be blank"
-       end
-     end
-
-     def item_hash(item_head)
-       config
-         .deep_fetch(:item, :template)
-         .merge(config.deep_fetch(:csv, :custom_numeric_columns))
-         .merge(config.deep_fetch(:csv, :custom_text_columns))
-         .map { |attribute_name, default_value|
-           attribute_class = attribute_classes.fetch(attribute_name)
-           attribute_parser = attribute_class.new(item_head:, columns:, config:)
-           parsed = attribute_parser.parse
-
-           [attribute_name, parsed || default_value]
-         }.to_h
-     end
-   end
- end
data/lib/reading/row/row.rb DELETED
@@ -1,88 +0,0 @@
- module Reading
-   # A base class that contains behaviors common to ___Row classes.
-   class Row
-     using Util::StringRemove
-     using Util::HashArrayDeepFetch
-     using Util::HashCompactByTemplate
-
-     private attr_reader :line
-
-     # @param line [Reading::Line] the Line that this Row represents.
-     def initialize(line)
-       @line = line
-
-       after_initialize
-     end
-
-     # Parses a CSV row into an array of hashes of item data. How this is done
-     # depends on how the template methods (further below) are implemented in
-     # subclasses of Row.
-     # @return [Array<Hash>] an array of hashes like the template in config.rb
-     def parse
-       return [] if skip?
-
-       before_parse
-
-       items = item_heads.map { |item_head|
-         item_hash(item_head)
-           .compact_by(template: config.deep_fetch(:item, :template))
-       }.compact
-
-       items
-
-     rescue Reading::Error => e
-       e.handle(line:)
-       []
-     end
-
-     private
-
-     def string
-       @line.string
-     end
-
-     def config
-       @line.csv.config
-     end
-
-     # A "head" is a string in the Head column containing a chunk of item
-     # information, starting with a format emoji. A typical row describes one
-     # item and so contains one head, but a row describing multiple items (with
-     # multiple heads in the Head column) is possible. Also, a row of compact
-     # planned items is essentially a list of heads, though with different
-     # elements than a normal row's head.
-     # @return [Array<String>]
-     def item_heads
-       string_to_be_split_by_format_emojis
-         .split(config.deep_fetch(:csv, :regex, :formats_split))
-         .tap { |item_heads|
-           item_heads.first.remove!(config.deep_fetch(:csv, :regex, :dnf))
-           item_heads.first.remove!(config.deep_fetch(:csv, :regex, :progress))
-         }
-         .map { |item_head| item_head.strip }
-         .partition { |item_head| item_head.match?(/\A#{config.deep_fetch(:csv, :regex, :formats)}/) }
-         .reject(&:empty?)
-         .first
-     end
-
-     # Below: template methods that can (or must) be overridden.
-
-     def after_initialize
-     end
-
-     def before_parse
-     end
-
-     def skip?
-       false
-     end
-
-     def string_to_be_split_by_format_emojis
-       raise NotImplementedError, "#{self.class} should have implemented #{__method__}"
-     end
-
-     def item_hash(item_head)
-       raise NotImplementedError, "#{self.class} should have implemented #{__method__}"
-     end
-   end
- end
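
The removed Row base class above drives parsing through template methods that subclasses override (after_initialize, before_parse, skip?, string_to_be_split_by_format_emojis, item_hash). A minimal standalone sketch of that pattern, independent of the gem's config and helper refinements:

# Illustrative only: mirrors the hook structure of the deleted Row class,
# not the gem's actual parsing logic.
class SketchRow
  def parse
    return [] if skip?

    before_parse
    item_heads.map { |head| item_hash(head) }
  end

  private

  # Hooks with default behavior, overridable by subclasses.
  def skip? = false
  def before_parse; end

  # Hooks that subclasses must implement.
  def item_heads
    raise NotImplementedError
  end

  def item_hash(head)
    raise NotImplementedError
  end
end

# A null-object subclass in the spirit of the deleted BlankRow.
class SketchBlankRow < SketchRow
  private

  def skip? = true
end

SketchBlankRow.new.parse # => []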