reading 0.6.1 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/bin/reading +5 -5
- data/bin/readingfile +31 -0
- data/lib/reading/config.rb +96 -108
- data/lib/reading/errors.rb +10 -66
- data/lib/reading/filter.rb +95 -0
- data/lib/reading/item/time_length.rb +140 -0
- data/lib/reading/item/view.rb +121 -0
- data/lib/reading/item.rb +117 -0
- data/lib/reading/parsing/attributes/attribute.rb +26 -0
- data/lib/reading/parsing/attributes/author.rb +15 -0
- data/lib/reading/parsing/attributes/experiences/dates_and_head_transformer.rb +106 -0
- data/lib/reading/parsing/attributes/experiences/history_transformer.rb +452 -0
- data/lib/reading/parsing/attributes/experiences/spans_validator.rb +149 -0
- data/lib/reading/parsing/attributes/experiences.rb +27 -0
- data/lib/reading/parsing/attributes/genres.rb +16 -0
- data/lib/reading/parsing/attributes/notes.rb +22 -0
- data/lib/reading/parsing/attributes/rating.rb +17 -0
- data/lib/reading/parsing/attributes/shared.rb +62 -0
- data/lib/reading/parsing/attributes/title.rb +21 -0
- data/lib/reading/parsing/attributes/variants.rb +77 -0
- data/lib/reading/parsing/csv.rb +112 -0
- data/lib/reading/parsing/parser.rb +292 -0
- data/lib/reading/parsing/rows/column.rb +131 -0
- data/lib/reading/parsing/rows/comment.rb +26 -0
- data/lib/reading/parsing/rows/compact_planned.rb +30 -0
- data/lib/reading/parsing/rows/compact_planned_columns/head.rb +60 -0
- data/lib/reading/parsing/rows/regular.rb +33 -0
- data/lib/reading/parsing/rows/regular_columns/end_dates.rb +20 -0
- data/lib/reading/parsing/rows/regular_columns/genres.rb +20 -0
- data/lib/reading/parsing/rows/regular_columns/head.rb +45 -0
- data/lib/reading/parsing/rows/regular_columns/history.rb +143 -0
- data/lib/reading/parsing/rows/regular_columns/length.rb +35 -0
- data/lib/reading/parsing/rows/regular_columns/notes.rb +32 -0
- data/lib/reading/parsing/rows/regular_columns/rating.rb +15 -0
- data/lib/reading/parsing/rows/regular_columns/sources.rb +94 -0
- data/lib/reading/parsing/rows/regular_columns/start_dates.rb +35 -0
- data/lib/reading/parsing/transformer.rb +70 -0
- data/lib/reading/util/hash_compact_by_template.rb +1 -0
- data/lib/reading/util/hash_deep_merge.rb +1 -1
- data/lib/reading/util/hash_to_data.rb +30 -0
- data/lib/reading/util/numeric_to_i_if_whole.rb +12 -0
- data/lib/reading/util/string_truncate.rb +13 -4
- data/lib/reading/version.rb +1 -1
- data/lib/reading.rb +49 -0
- metadata +76 -42
- data/lib/reading/attribute/all_attributes.rb +0 -83
- data/lib/reading/attribute/attribute.rb +0 -25
- data/lib/reading/attribute/experiences/dates_validator.rb +0 -94
- data/lib/reading/attribute/experiences/experiences_attribute.rb +0 -74
- data/lib/reading/attribute/experiences/progress_subattribute.rb +0 -48
- data/lib/reading/attribute/experiences/spans_subattribute.rb +0 -82
- data/lib/reading/attribute/variants/extra_info_subattribute.rb +0 -44
- data/lib/reading/attribute/variants/length_subattribute.rb +0 -45
- data/lib/reading/attribute/variants/series_subattribute.rb +0 -57
- data/lib/reading/attribute/variants/sources_subattribute.rb +0 -78
- data/lib/reading/attribute/variants/variants_attribute.rb +0 -69
- data/lib/reading/csv.rb +0 -76
- data/lib/reading/line.rb +0 -23
- data/lib/reading/row/blank_row.rb +0 -23
- data/lib/reading/row/compact_planned_row.rb +0 -130
- data/lib/reading/row/regular_row.rb +0 -99
- data/lib/reading/row/row.rb +0 -88
- data/lib/reading/util/hash_to_struct.rb +0 -29
data/lib/reading/attribute/experiences/experiences_attribute.rb
DELETED
@@ -1,74 +0,0 @@
-require_relative "spans_subattribute"
-require_relative "progress_subattribute"
-require_relative "dates_validator"
-require "date"
-
-module Reading
-  class Row
-    class ExperiencesAttribute < Attribute
-      using Util::HashArrayDeepFetch
-      using Util::HashDeepMerge
-
-      def parse
-        started, finished = dates_split(columns)
-
-        experiences_with_dates = started.map.with_index { |entry, i|
-          variant_index = variant_index(entry)
-          spans_attr = SpansSubattribute.new(date_entry: entry, dates_finished: finished, date_index: i, variant_index:, columns:, config:)
-
-          {
-            spans: spans_attr.parse || template.fetch(:spans),
-            group: group(entry) || template.fetch(:group),
-            variant_index: variant_index || template.fetch(:variant_index)
-          }
-        }.presence
-
-        if experiences_with_dates
-          # Raises an error if any sequence of dates does not make sense.
-          DatesValidator.validate(experiences_with_dates, config)
-
-          return experiences_with_dates
-        else
-          if prog = ProgressSubattribute.new(columns:, config:).parse_head
-            return [template.deep_merge(spans: [{ progress: prog }] )]
-          else
-            return nil
-          end
-        end
-      end
-
-      private
-
-      def template
-        @template ||= config.deep_fetch(:item, :template, :experiences).first
-      end
-
-      def dates_split(columns)
-        dates_finished = columns[:dates_finished]&.presence
-          &.split(config.deep_fetch(:csv, :separator))&.map(&:strip) || []
-        # Don't use #has_key? because simply checking for nil covers the
-        # case where dates_started is the last column and omitted.
-        started_column_exists = columns[:dates_started]&.presence
-
-        dates_started =
-          if started_column_exists
-            columns[:dates_started]&.presence&.split(config.deep_fetch(:csv, :separator))&.map(&:strip)
-          else
-            [""] * dates_finished.count
-          end
-
-        [dates_started, dates_finished]
-      end
-
-      def group(entry)
-        entry.match(config.deep_fetch(:csv, :regex, :group_experience))&.captures&.first
-      end
-
-      def variant_index(date_entry)
-        match = date_entry.match(config.deep_fetch(:csv, :regex, :variant_index))
-
-        (match&.captures&.first&.to_i || 1) - 1
-      end
-    end
-  end
-end
data/lib/reading/attribute/experiences/progress_subattribute.rb
DELETED
@@ -1,48 +0,0 @@
-module Reading
-  class Row
-    class ProgressSubattribute
-      using Util::HashArrayDeepFetch
-
-      private attr_reader :date_entry, :variant_index, :columns, :config
-
-      # @param date_entry [String] the entry in Dates Started.
-      # @param variant_index [Integer] the variant index, for getting length for default amount.
-      # @param columns [Array<String>]
-      # @param config [Hash]
-      def initialize(date_entry: nil, variant_index: nil, columns:, config:)
-        @date_entry = date_entry
-        @variant_index = variant_index
-        @columns = columns
-        @config = config
-      end
-
-      def parse
-        progress(date_entry) || progress(columns[:head])
-      end
-
-      def parse_head
-        progress(columns[:head])
-      end
-
-      private
-
-      def progress(str)
-        prog = str.match(config.deep_fetch(:csv, :regex, :progress))
-
-        if prog
-          if prog_percent = prog[:percent]&.to_i
-            return prog_percent / 100.0
-          elsif prog_time = prog[:time]
-            return prog_time
-          elsif prog_pages = prog[:pages]&.to_i
-            return prog_pages
-          end
-        end
-
-        dnf = str.match(config.deep_fetch(:csv, :regex, :dnf))&.captures&.first
-        return 0 if dnf
-        nil
-      end
-    end
-  end
-end
data/lib/reading/attribute/experiences/spans_subattribute.rb
DELETED
@@ -1,82 +0,0 @@
-module Reading
-  class Row
-    class SpansSubattribute
-      using Util::HashArrayDeepFetch
-
-      private attr_reader :date_entry, :dates_finished, :date_index, :variant_index, :columns, :config
-
-      # @param date_entry [String] the entry in Dates Started.
-      # @param dates_finished [Array<String>] the entries in Dates Finished.
-      # @param date_index [Integer] the index of the entry.
-      # @param variant_index [Integer] the variant index, for getting length for default amount.
-      # @param columns [Array<String>]
-      # @param config [Hash]
-      def initialize(date_entry:, dates_finished:, date_index:, variant_index:, columns:, config:)
-        @date_entry = date_entry
-        @dates_finished = dates_finished
-        @date_index = date_index
-        @variant_index = variant_index
-        @columns = columns
-        @config = config
-      end
-
-      def parse
-        started = date_started(date_entry)
-        finished = date_finished(dates_finished, date_index)
-        return [] if started.nil? && finished.nil?
-
-        progress_attr = ProgressSubattribute.new(date_entry:, variant_index:, columns:, config:)
-        progress = progress_attr.parse
-
-        [{
-          dates: started..finished || template.fetch(:dates),
-          amount: length || template.fetch(:amount),
-          progress: progress || (1.0 if finished) || template.fetch(:progress),
-          name: template.fetch(:name),
-          favorite?: template.fetch(:favorite?),
-        }]
-      end
-
-      private
-
-      def template
-        @template ||= config.deep_fetch(:item, :template, :experiences, 0, :spans).first
-      end
-
-      def date_started(date_entry)
-        dates = date_entry.scan(config.deep_fetch(:csv, :regex, :date))
-        raise InvalidDateError, "Conjoined dates" if dates.count > 1
-        raise InvalidDateError, "Missing or incomplete date" if date_entry.present? && dates.empty?
-
-        date_str = dates.first
-        Date.parse(date_str) if date_str
-      rescue Date::Error
-        raise InvalidDateError, "Unparsable date"
-      end
-
-      def date_finished(dates_finished, date_index)
-        return nil if dates_finished.nil?
-
-        date_str = dates_finished[date_index]&.presence
-        Date.parse(date_str) if date_str
-      rescue Date::Error
-        if date_str.match?(config.deep_fetch(:csv, :regex, :date))
-          raise InvalidDateError, "Unparsable date"
-        else
-          raise InvalidDateError, "Missing or incomplete date"
-        end
-      end
-
-      def length
-        sources_str = columns[:sources]&.presence || " "
-        bare_variant = sources_str
-          .split(config.deep_fetch(:csv, :regex, :formats_split))
-          .dig(variant_index)
-          &.split(config.deep_fetch(:csv, :long_separator))
-          &.first
-        length_attr = LengthSubattribute.new(bare_variant:, columns:, config:)
-        length_attr.parse
-      end
-    end
-  end
-end
data/lib/reading/attribute/variants/extra_info_subattribute.rb
DELETED
@@ -1,44 +0,0 @@
-module Reading
-  class Row
-    class ExtraInfoSubattribute
-      using Util::HashArrayDeepFetch
-
-      private attr_reader :item_head, :variant_with_extras, :config
-
-      # @param item_head [String] see Row#item_heads for a definition.
-      # @param variant_with_extras [String] the full variant string.
-      # @param config [Hash]
-      def initialize(item_head:, variant_with_extras: nil, config:)
-        @item_head = item_head
-        @variant_with_extras = variant_with_extras
-        @config = config
-      end
-
-      def parse
-        (
-          Array(extra_info(item_head)) +
-          Array(extra_info(variant_with_extras))
-        ).presence
-      end
-
-      def parse_head
-        extra_info(item_head)
-      end
-
-      private
-
-      def template
-        config.deep_fetch(:item, :template, :variants, 0, :series).first
-      end
-
-      def extra_info(str)
-        separated = str.split(config.deep_fetch(:csv, :long_separator))
-        separated.delete_at(0) # everything before the extra info
-        separated.reject { |str|
-          str.start_with?("#{config.deep_fetch(:csv, :series_prefix)} ") ||
-            str.match(config.deep_fetch(:csv, :regex, :series_volume))
-        }.presence
-      end
-    end
-  end
-end
data/lib/reading/attribute/variants/length_subattribute.rb
DELETED
@@ -1,45 +0,0 @@
-module Reading
-  class Row
-    class LengthSubattribute
-      using Util::HashArrayDeepFetch
-
-      private attr_reader :item_head, :bare_variant, :columns, :config
-
-      # @param bare_variant [String] the variant string before series / extra info.
-      # @param columns [Array<String>]
-      # @param config [Hash]
-      def initialize(bare_variant:, columns:, config:)
-        @bare_variant = bare_variant
-        @columns = columns
-        @config = config
-      end
-
-      def parse
-        in_variant = length_in(
-          bare_variant,
-          time_regex: config.deep_fetch(:csv, :regex, :time_length_in_variant),
-          pages_regex: config.deep_fetch(:csv, :regex, :pages_length_in_variant),
-        )
-        in_length = length_in(
-          columns[:length],
-          time_regex: config.deep_fetch(:csv, :regex, :time_length),
-          pages_regex: config.deep_fetch(:csv, :regex, :pages_length),
-        )
-
-        in_variant || in_length ||
-          (raise InvalidLengthError, "Missing length" unless columns[:length].blank?)
-      end
-
-      private
-
-      def length_in(str, time_regex:, pages_regex:)
-        return nil if str.blank?
-
-        time_length = str.strip.match(time_regex)&.captures&.first
-        return time_length unless time_length.nil?
-
-        str.strip.match(pages_regex)&.captures&.first&.to_i
-      end
-    end
-  end
-end
data/lib/reading/attribute/variants/series_subattribute.rb
DELETED
@@ -1,57 +0,0 @@
-module Reading
-  class Row
-    class SeriesSubattribute
-      using Util::HashArrayDeepFetch
-
-      private attr_reader :item_head, :variant_with_extras, :config
-
-      # @param item_head [String] see Row#item_heads for a definition.
-      # @param variant_with_extras [String] the full variant string.
-      # @param config [Hash]
-      def initialize(item_head:, variant_with_extras: nil, config:)
-        @item_head = item_head
-        @variant_with_extras = variant_with_extras
-        @config = config
-      end
-
-      def parse
-        (
-          Array(series(item_head)) +
-          Array(series(variant_with_extras))
-        ).presence
-      end
-
-      def parse_head
-        series(item_head)
-      end
-
-      private
-
-      def template
-        config.deep_fetch(:item, :template, :variants, 0, :series).first
-      end
-
-      def series(str)
-        separated = str
-          .split(config.deep_fetch(:csv, :long_separator))
-          .map(&:strip)
-          .map(&:presence)
-          .compact
-
-        separated.delete_at(0) # everything before the series/extra info
-
-        separated.map { |str|
-          volume = str.match(config.deep_fetch(:csv, :regex, :series_volume))
-          prefix = "#{config.deep_fetch(:csv, :series_prefix)} "
-
-          if volume || str.start_with?(prefix)
-            {
-              name: str.delete_suffix(volume.to_s).delete_prefix(prefix) || template[:name],
-              volume: volume&.captures&.first&.to_i || template[:volume],
-            }
-          end
-        }.compact.presence
-      end
-    end
-  end
-end
data/lib/reading/attribute/variants/sources_subattribute.rb
DELETED
@@ -1,78 +0,0 @@
-module Reading
-  class Row
-    class SourcesSubattribute
-      using Util::StringRemove
-      using Util::HashArrayDeepFetch
-
-      private attr_reader :item_head, :bare_variant, :config
-
-      # @param bare_variant [String] the variant string before series / extra info.
-      # @param config [Hash]
-      def initialize(bare_variant:, config:)
-        @bare_variant = bare_variant
-        @config = config
-      end
-
-      def parse
-        urls = sources_urls(bare_variant).map { |url|
-          {
-            name: url_name(url) || template.deep_fetch(:sources, 0, :name),
-            url: url,
-          }
-        }
-
-        names = sources_names(bare_variant).map { |name|
-          {
-            name: name,
-            url: template.deep_fetch(:sources, 0, :url),
-          }
-        }
-
-        (urls + names).presence
-      end
-
-      private
-
-      def template
-        @template ||= config.deep_fetch(:item, :template, :variants).first
-      end
-
-      def sources_urls(str)
-        str.scan(config.deep_fetch(:csv, :regex, :url))
-      end
-
-      # Turns everything that is not a source name (ISBN, source URL, length) into
-      # a separator, then splits by that separator and removes empty elements
-      # and format emojis. What's left is source names.
-      def sources_names(str)
-        not_names = [:isbn, :url, :time_length_in_variant, :pages_length_in_variant]
-        names_and_separators = str
-
-        not_names.each do |regex_type|
-          names_and_separators = names_and_separators.gsub(
-            config.deep_fetch(:csv, :regex, regex_type),
-            config.deep_fetch(:csv, :separator),
-          )
-        end
-
-        names_and_separators
-          .split(config.deep_fetch(:csv, :separator))
-          .map { |name| name.remove(/\A\s*#{config.deep_fetch(:csv, :regex, :formats)}\s*/) }
-          .map(&:strip)
-          .reject(&:empty?)
-      end
-
-      def url_name(url)
-        config
-          .deep_fetch(:item, :sources, :names_from_urls)
-          .each do |url_part, name|
-            if url.include?(url_part)
-              return name
-            end
-          end
-
-        config.deep_fetch(:item, :sources, :default_name_for_url)
-      end
-    end
-  end
-end
data/lib/reading/attribute/variants/variants_attribute.rb
DELETED
@@ -1,69 +0,0 @@
-require_relative "series_subattribute"
-require_relative "sources_subattribute"
-require_relative "length_subattribute"
-require_relative "extra_info_subattribute"
-
-module Reading
-  class Row
-    class VariantsAttribute < Attribute
-      using Util::HashArrayDeepFetch
-
-      def parse
-        sources_str = columns[:sources]&.presence || " "
-
-        format_as_separator = config.deep_fetch(:csv, :regex, :formats_split)
-
-        sources_str.split(format_as_separator).map { |variant_with_extras|
-          # without extra info or series
-          bare_variant = variant_with_extras
-            .split(config.deep_fetch(:csv, :long_separator))
-            .first
-
-          series_attr = SeriesSubattribute.new(item_head:, variant_with_extras:, config:)
-          sources_attr = SourcesSubattribute.new(bare_variant:, config:)
-          # Length, despite not being very complex, is still split out into a
-          # subattribute because it needs to be accessible to
-          # ExperiencesAttribute (more specifically SpansSubattribute) which
-          # uses length as a default value for amount.
-          length_attr = LengthSubattribute.new(bare_variant:, columns:, config:)
-          extra_info_attr = ExtraInfoSubattribute.new(item_head:, variant_with_extras:, config:)
-
-          variant =
-            {
-              format: format(bare_variant) || format(item_head) || template.fetch(:format),
-              series: series_attr.parse || template.fetch(:series),
-              sources: sources_attr.parse || template.fetch(:sources),
-              isbn: isbn(bare_variant) || template.fetch(:isbn),
-              length: length_attr.parse || template.fetch(:length),
-              extra_info: extra_info_attr.parse || template.fetch(:extra_info)
-            }
-
-          if variant != template
-            variant
-          else
-            nil
-          end
-        }.compact.presence
-      end
-
-      private
-
-      def template
-        @template ||= config.deep_fetch(:item, :template, :variants).first
-      end
-
-      def format(str)
-        emoji = str.match(/^#{config.deep_fetch(:csv, :regex, :formats)}/).to_s
-        config.deep_fetch(:item, :formats).key(emoji)
-      end
-
-      def isbn(str)
-        isbns = str.scan(config.deep_fetch(:csv, :regex, :isbn))
-        if isbns.count > 1
-          raise InvalidSourceError, "Only one ISBN/ASIN is allowed per item variant"
-        end
-        isbns[0]&.to_s
-      end
-    end
-  end
-end
data/lib/reading/csv.rb
DELETED
@@ -1,76 +0,0 @@
-# Used throughout, in other files.
-require_relative "util/blank"
-require_relative "util/string_remove"
-require_relative "util/string_truncate"
-require_relative "util/hash_to_struct"
-require_relative "util/hash_deep_merge"
-require_relative "util/hash_array_deep_fetch"
-require_relative "util/hash_compact_by_template"
-require_relative "errors"
-
-# Used just here.
-require_relative "config"
-require_relative "line"
-
-module Reading
-  class CSV
-    using Util::HashDeepMerge
-    using Util::HashArrayDeepFetch
-    using Util::HashToStruct
-
-    attr_reader :config
-
-    # @param feed [Object] the input source, which must respond to #each_line;
-    #   if nil, the file at the given path is used.
-    # @param path [String] the path of the source file.
-    # @param config [Hash] a custom config which overrides the defaults,
-    #   e.g. { errors: { styling: :html } }
-    def initialize(feed = nil, path: nil, config: {})
-      validate_feed_or_path(feed, path)
-
-      @feed = feed
-      @path = path
-      @config ||= Config.new(config).hash
-    end
-
-    # Parses a CSV reading log into item data (an array of Structs).
-    # For what the Structs look like, see the Hash at @default_config[:item][:template]
-    # in config.rb. The Structs are identical in structure to that Hash (with
-    # every inner Hash replaced with a Struct).
-    # @return [Array<Struct>] an array of Structs like the template in config.rb
-    def parse
-      feed = @feed || File.open(@path)
-      items = []
-
-      feed.each_line do |string|
-        line = Line.new(string, self)
-        row = line.to_row
-
-        items += row.parse
-      end
-
-      items.map(&:to_struct)
-    ensure
-      feed&.close if feed.respond_to?(:close)
-    end
-
-    private
-
-    # Checks on the given feed and path (arguments to #initialize).
-    # @raise [FileError] if the given path is invalid.
-    # @raise [ArgumentError] if both feed and path are nil.
-    def validate_feed_or_path(feed, path)
-      return true if feed
-
-      if path
-        if !File.exist?(path)
-          raise FileError, "File not found! #{path}"
-        elsif File.directory?(path)
-          raise FileError, "The reading log must be a file, but the path given is a directory: #{path}"
-        end
-      else
-        raise ArgumentError, "Either a feed (String, File, etc.) or a file path must be provided."
-      end
-    end
-  end
-end
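For orientation, the Reading::CSV class removed above was the 0.6.1 parsing entry point; 0.8.0 replaces it with the new files under data/lib/reading/parsing/ listed at the top of this diff. A minimal usage sketch of the removed API, based only on the #initialize signature and doc comments in the deleted file (the require path is an assumption; this does not show the 0.8.0 replacement):

require "reading/csv"  # assumed require path for lib/reading/csv.rb in 0.6.1

# Parse a reading log from a file path:
items = Reading::CSV.new(path: "/path/to/reading.csv").parse

# Or from any feed that responds to #each_line, e.g. a String or an open File:
items = Reading::CSV.new(File.read("/path/to/reading.csv")).parse

# items is an Array of Structs shaped like config[:item][:template] in config.rb.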
data/lib/reading/line.rb
DELETED
@@ -1,23 +0,0 @@
-require_relative "row/compact_planned_row"
-require_relative "row/blank_row"
-require_relative "row/regular_row"
-
-module Reading
-  # A bridge between rows as strings and as parsable Rows, used whenever the
-  # context of the line in the CSV is needed, e.g. converting a line to a Row,
-  # or adding a CSV line to a Row parsing error.
-  class Line
-    attr_reader :string, :csv
-
-    def initialize(string, csv)
-      @string = string.dup.force_encoding(Encoding::UTF_8).strip
-      @csv = csv
-    end
-
-    def to_row
-      return CompactPlannedRow.new(self) if CompactPlannedRow.match?(self)
-      return BlankRow.new(self) if BlankRow.match?(self)
-      RegularRow.new(self)
-    end
-  end
-end
data/lib/reading/row/blank_row.rb
DELETED
@@ -1,23 +0,0 @@
-require_relative "row"
-
-module Reading
-  # An empty or commented-out row. A null object which returns an empty array.
-  class BlankRow < Row
-    using Util::HashArrayDeepFetch
-
-    # Whether the given CSV line is a blank row.
-    # @param line [Reading::Line]
-    # @return [Boolean]
-    def self.match?(line)
-      comment_char = line.csv.config.deep_fetch(:csv, :comment_character)
-
-      line.string.strip.empty? ||
-        line.string.strip.start_with?(comment_char)
-    end
-
-    # Overrides Row#parse.
-    def parse
-      []
-    end
-  end
-end