reading 0.9.0 → 0.9.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: e839c46251c307a65a234a9a374ad964f68d934d974005d6aa38cbb35e10f756
- data.tar.gz: fa20c0b78f3fd1194495e1e604bb1d95b37e0334ac12c745d6508e026cd6fe97
+ metadata.gz: e30d2e08f85d4ecb58352862165f63b62dbcbeb436103cf0761f318592359571
+ data.tar.gz: 7991e2a241345bd8f5b8fe268c462b721d610157d2c2652aec16882d3833c65f
  SHA512:
- metadata.gz: 2bbca1450b2f6ae4c0d2b6d4cfeec495bf30b20639fcb3a5b121711e1d995a8b13a5ca64100d7a5e11d00d60c3808d61d25583510e3cd19902d412eca216154f
- data.tar.gz: 74a676238941cdd5a0a7ecf8b2a4ed3c705883daadc25b3710a17eb9e325ec294d8b9fad71e653779af1ea9e01d0e8fed6423592dbe750b4f37c79f0f690387d
+ metadata.gz: 1357b2ecd226209ff58e5fa7215ca073bfda99225a9d1623b111417d9bf01ecc9ec61406b2bde9fb3a26f1c7ce6a24e9495eb2bedca403a45d029d9e94e83c0e
+ data.tar.gz: 10285aa58757b8fa07e94945811a9195f6e5015e06ace666c963885b5a5eb19839bae855013a54a76fb4d2a038a9dd1fc53611b2ea0d28a4385073811064cb46
data/bin/reading CHANGED
@@ -13,9 +13,9 @@
  # reading '3|📕Trying|Little Library 1970147288'
  # reading '📕Trying|Little Library 1970147288' 'head, sources'

- require 'debug'
  require_relative '../lib/reading'
  require_relative '../lib/reading/stats/terminal_result_formatters'
+ require 'debug'
  require 'amazing_print'
  require 'readline'
  require 'pastel'
@@ -38,26 +38,33 @@ GROUP_HEADING_FORMATTERS = [
  def print_grouped_results(grouped_results, group_heading_formatters)
    indent_level = GROUP_HEADING_FORMATTERS.count - group_heading_formatters.count

-   if grouped_results.nil? || grouped_results.empty?
+   if grouped_results.nil? || (grouped_results.respond_to?(:empty?) && grouped_results.empty?)
      puts " " * indent_level + PASTEL.bright_black("none") + "\n"
      return
-   elsif !grouped_results.is_a? Hash
-     puts " " * indent_level + grouped_results.gsub("\n", "\n" + " " * indent_level) + "\n"
-     return
    end

-   grouped_results.each do |group_name, grouped|
-     puts " " * indent_level + group_heading_formatters.first.call(group_name)
-     print_grouped_results(grouped, group_heading_formatters[1..])
+   if grouped_results.is_a?(Hash) ||
+     (grouped_results.is_a?(Array) && grouped_results.first.length == 2)
+
+     grouped_results.each do |group_name, grouped|
+       puts " " * indent_level + group_heading_formatters.first.call(group_name)
+       print_grouped_results(grouped, group_heading_formatters[1..])
+     end
+   elsif grouped_results.is_a?(Array)
+     numbered_results = grouped_results.map.with_index { |v, i| "#{i + 1}. #{v}" }
+
+     puts " " * indent_level + numbered_results.join("\n" + " " * indent_level) + "\n"
+   else
+     puts " " * indent_level + grouped_results.to_s + "\n"
    end
  end

  input = ARGV[0]
  unless input
    raise ArgumentError,
-     "Argument required, either a CSV file path or a CSV string. Examples:\n" \
-     "parsereading /home/felipe/reading.csv\n" \
-     "parsereading '3|📕Trying|Little Library 1970147288'"
+     "Argument required, either a CSV file path or a CSV string.\nExamples:\n" \
+     "reading /home/felipe/reading.csv\n" \
+     "reading '3|📕Trying|Little Library 1970147288'"
  end

  if ARGV[1]
@@ -86,11 +93,19 @@ if input_is_csv_path
        result_formatters: Reading::Stats::ResultFormatters::TERMINAL,
      )

-     print_grouped_results(results, GROUP_HEADING_FORMATTERS)
+     if results.is_a?(Array) && results.first.is_a?(Reading::Item) # `debug` operation
+       r = results
+       puts PASTEL.red.bold("Enter 'c' to leave the debugger.")
+       debugger
+     else
+       print_grouped_results(results, GROUP_HEADING_FORMATTERS)
+     end
    rescue Reading::Error => e
      puts e
    end
  else # CSV string arg
+   input = input.gsub("\\|", "|") # because some pipes are escaped when pasting into the terminal
+
    begin
      item_hashes = Reading.parse(lines: input, hash_output: true, item_view: false)
    rescue Reading::Error => e
@@ -178,8 +178,8 @@ module Reading
          spans:
            [{
              dates: nil,
-             progress: 1.0,
              amount: 0,
+             progress: 1.0,
              name: nil,
              favorite?: false,
            }],
data/lib/reading/item.rb CHANGED
@@ -119,12 +119,11 @@ module Reading
        before_index = nil
        middle_indices = experiences.map.with_index { |experience, i|
          if experience.spans.first.dates &&
-           experience.spans.first.dates.begin < date &&
-           experience.last_end_date
+           experience.spans.first.dates.begin < date

            before_index = i

-           if experience.last_end_date >= date
+           if (experience.last_end_date || Date.today) >= date
              i
            else
              nil
@@ -133,15 +132,14 @@ module Reading
        }
          .compact

-     # There are no experiences with done spans that overlap the date.
+     # There are no experiences with spans that overlap the date.
      if middle_indices.none?
        # The Item is planned.
        return [] if experiences.none? { _1.spans.first.dates }
        # date is after all spans.
        return [self, nil] if experiences.all? { _1.last_end_date && date > _1.last_end_date }
-       # date is before all spans, or overlaps with an in-progress span.
-       return [nil, self] if experiences.all? { _1.spans.first.dates.begin >= date } ||
-         experiences.any? { _1.spans.first.dates.begin < date && _1.last_end_date.nil? }
+       # date is before all spans.
+       return [nil, self] if experiences.all? { _1.spans.first.dates.begin >= date }

        # Date is in between experiences.
        if before_index
@@ -173,7 +171,7 @@ module Reading
        if span.dates && span.dates.begin < date
          before_index = i

-         span.dates.end >= date
+         (span.dates.end || Date.today) >= date
        end
      }

@@ -183,15 +181,20 @@ module Reading
      else
        span_middle = experience_middle.spans[span_middle_index]

+       unless span_middle.dates.end
+         end_today_instead_of_endless = { dates: span_middle.dates.begin..Date.today }
+         span_middle = span_middle.to_h.merge(end_today_instead_of_endless).to_data
+       end
+
        dates_before = span_middle.dates.begin..date.prev_day
-       amount_before = span_middle.amount * (dates_before.count / span_middle.dates.count.to_f)
+       amount_before = (span_middle.amount || 0) * (dates_before.count / span_middle.dates.count.to_f)
        span_middle_before = span_middle.with(
          dates: dates_before,
          amount: amount_before,
        )

        dates_after = date..span_middle.dates.end
-       amount_after = span_middle.amount * (dates_after.count / span_middle.dates.count.to_f)
+       amount_after = (span_middle.amount || 0) * (dates_after.count / span_middle.dates.count.to_f)
        span_middle_after = span_middle.with(
          dates: dates_after,
          amount: amount_after,
@@ -29,7 +29,10 @@ module Reading
      start_dates = Array.new(size) { |i| parsed_row[:start_dates]&.dig(i) || {} }
      end_dates = Array.new(size) { |i| parsed_row[:end_dates]&.dig(i) || nil }

-     start_end_dates = start_dates.zip(end_dates).presence || [[{}, nil]]
+     start_end_dates = start_dates
+       .reject { _1[:planned] }
+       .zip(end_dates)
+       .presence || [[{}, nil]]

      experiences_with_dates = start_end_dates.map { |start_entry, end_entry|
        {
@@ -85,12 +88,14 @@ module Reading
          parsed_row[:head][head_index][:format]
        length = Attributes::Shared.length(parsed_row[:sources]&.dig(variant_index), format:) ||
          Attributes::Shared.length(parsed_row[:length], format:)
+       no_end_date = !dates.end if dates &&
+         Config.hash.fetch(:enabled_columns).include?(:end_dates)

        [
          {
            dates: dates,
            amount: (length if dates),
-           progress: Attributes::Shared.progress(start_entry) ||
+           progress: Attributes::Shared.progress(start_entry, no_end_date:) ||
              Attributes::Shared.progress(parsed_row[:head][head_index]) ||
              (1.0 if end_entry),
            name: span_template.fetch(:name),
@@ -1,5 +1,16 @@
  require_relative 'spans_validator'

+ # TODO Refactor! This entire file has become 🤢🤮 with the accumulation of new
+ # features in the History column.
+ #
+ # Goals of the refactor:
+ #   - if possible, avoid daily_spans; build spans with date ranges directly.
+ #   - validate spans at every step; that way the origin of bugs will be easier
+ #     to find, e.g. for the bug fixed in 6310639, spans became invalid in
+ #     #fix_open_ranges! and led to an error elsewhere that didn't give a trace
+ #     back to the origin.
+ #   - to facilitate the points above, create a class ExperienceBuilder to
+ #     contain much of the logic that is currently in this file.
  module Reading
    module Parsing
      module Attributes
@@ -16,13 +27,14 @@ module Reading
        # many days, for example.
        AVERAGE_DAYS_IN_A_MONTH = 30.437r

-       private attr_reader :parsed_row, :head_index
+       private attr_reader :parsed_row, :head_index, :next_open_range_id

        # @param parsed_row [Hash] a parsed row (the intermediate hash).
        # @param head_index [Integer] current item's position in the Head column.
        def initialize(parsed_row, head_index)
          @parsed_row = parsed_row
          @head_index = head_index
+         @next_open_range_id = 0
        end

        # Extracts experiences from the parsed row.
@@ -61,16 +73,21 @@ module Reading
          month: nil,
          day: nil,
          after_single_date: false,
-         open_range: false,
+         open_range_id: nil,
          planned: false,
          amount: nil,
+         repetitions: nil,
+         frequency: nil,
          last_start_year: nil,
          last_start_month: nil,
        }

+       # Dates after "not" entries.
+       except_dates = []
+
        entries.each do |entry|
          if entry[:except_dates]
-           reject_exception_dates!(entry, daily_spans, active)
+           except_dates += reject_exception_dates!(entry, daily_spans, active)
            next
          end
@@ -79,10 +96,14 @@ module Reading

        spans = merge_daily_spans(daily_spans)

-       fix_open_ranges!(spans)
+       spans = fix_open_ranges(spans, except_dates)

        relativize_amounts_from_progress!(spans)

+       remove_last_end_date_of_today_if_open_range!(spans)
+
+       remove_temporary_keys!(spans)
+
        spans
      end

@@ -92,6 +113,7 @@ module Reading
      # date-and-name combination.
      # @param active [Hash] variables that persist across entries, such as
      #   amount and implied date.
+     # @return [Array<Date>] the rejected dates.
      def reject_exception_dates!(entry, daily_spans, active)
        except_active = {
          year: active[:last_start_year],
@@ -122,6 +144,8 @@ module Reading
        daily_spans.reject! do |(date, _name), _span|
          except_dates.include?(date)
        end
+
+       except_dates
      end

      # Expands the given entry into one span per day, then adds them to daily_spans.
@@ -145,7 +169,7 @@ module Reading
        active[:month] = start_month if start_month
        active[:last_start_month] = active[:month]
        if start_day
-         active[:open_range] = false
+         active[:open_range_id] = nil
          active[:day] = start_day
        end

@@ -159,8 +183,8 @@ module Reading
          active[:planned] = false
        end

-       duplicate_open_range = !start_day && active[:open_range]
-       date_range = date_range(entry, active, duplicate_open_range:)
+       duplicate_open_range_id = active[:open_range_id] if !start_day
+       date_range = date_range(entry, active, duplicate_open_range: !!duplicate_open_range_id)

        # A startless date range (i.e. with an implied start date) appearing
        # immediately after a single date has its start date bumped forward
@@ -174,13 +198,15 @@ module Reading
        format = parsed_row[:sources]&.dig(variant_index)&.dig(:format) ||
          parsed_row[:head][head_index][:format]

-       amount =
-         Attributes::Shared.length(entry, format:, key_name: :amount, ignore_repetitions: true) ||
-         Attributes::Shared.length(parsed_row[:length], format:, episodic: true)
-       active[:amount] = amount if amount
-
        progress = Attributes::Shared.progress(entry)

+       amount_from_entry =
+         Attributes::Shared.length(entry, format:, key_name: :amount, ignore_repetitions: true)
+       amount_from_length =
+         Attributes::Shared.length(parsed_row[:length], format:, episodic: progress.nil? || parsed_row.dig(:length, :repetitions).nil?)
+       amount = amount_from_entry || amount_from_length
+       active[:amount] = amount if amount
+
        # If the entry has no amount and the item has no episodic length,
        # then use progress as amount instead. The typical scenario for this
        # is when tracking fixed-length items such as books. See
@@ -195,17 +221,27 @@ module Reading
          amount_from_progress = true
        end

-       repetitions = entry[:repetitions]&.to_i || 1
+       repetitions = entry[:repetitions]&.to_i
        frequency = entry[:frequency]

+       # If the entry has no amount or progress, default to the previous
+       # repetitions and frequency.
+       unless amount_from_entry || progress || repetitions
+         repetitions = active[:repetitions]
+         frequency = active[:frequency]
+       end
+
+       active[:repetitions] = repetitions if repetitions
+       active[:frequency] = frequency if frequency
+
        amounts_by_date = distribute_amount_across_date_range(
          date_range || Date.new(active[:year], active[:month], active[:day]),
          amount || active[:amount],
-         repetitions,
+         repetitions || 1,
          frequency,
        )

-       in_open_range = active[:open_range] || duplicate_open_range
+       open_range_id = active[:open_range_id] || duplicate_open_range_id

        daily_spans_from_entry = amounts_by_date.map { |date, daily_amount|
          span_without_dates = {
@@ -219,9 +255,11 @@ module Reading
            # Temporary keys (not in the final item data) for marking
            # spans to ...
            # ... be distributed evenly across an open date range.
-           in_open_range: in_open_range,
+           open_range_id:,
            # ... have their amounts adjusted to be relative to previous progress.
-           amount_from_progress: amount_from_progress,
+           amount_from_progress?: amount_from_progress,
+           amount_from_frequency?: !!frequency,
+           implied_date_range?: !date_range && !!frequency,
          }

          if entry[:planned] || active[:planned]
@@ -232,7 +270,7 @@ module Reading

          # For entries in an open range, add a random number to the key to
          # avoid overwriting entries with the same name, or lacking a name.
-         if in_open_range
+         if open_range_id
            key << rand
          end

@@ -285,7 +323,7 @@ module Reading
        return nil unless entry[:range] || duplicate_open_range

        if entry[:end_day]
-         active[:open_range] = false
+         active[:open_range_id] = nil

          end_year = entry[:end_year]&.to_i
          end_month = entry[:end_month]&.to_i
@@ -299,11 +337,17 @@ module Reading
          date_range = Date.new(active[:year], active[:month], active[:day])..
            Date.new(end_year || active[:year], end_month || active[:month], end_day)

-         active[:day] = end_day + 1
-         active[:month] = end_month if end_month
-         active[:year] = end_year if end_year
+         date_after_end = date_range.end.next_day
+
+         active[:day] = date_after_end.day
+         active[:month] = date_after_end.month if end_month
+         active[:year] = date_after_end.year if end_year
        else # either starting or continuing (duplicating) an open range
-         active[:open_range] ||= true
+         unless active[:open_range_id]
+           active[:open_range_id] = next_open_range_id
+           @next_open_range_id += 1
+         end
+
          date_range = Date.new(active[:year], active[:month], active[:day])..Date.today
        end

@@ -316,7 +360,7 @@ module Reading
      # @param amount [Float, Integer, Item::TimeLength] amount in
      #   pages or time.
      # @param repetitions [Integer] e.g. "x4" in a History entry.
-     # @param frequency [Integer] e.g. "/week" in a History entry.
+     # @param frequency [String] e.g. "/week" in a History entry.
      # @return [Hash{Date => Float, Integer, Item::TimeLength}]
      def distribute_amount_across_date_range(date_or_range, amount, repetitions, frequency)
        unless amount
@@ -366,33 +410,64 @@ module Reading
      # Set each open date range's last end date (wherever it's today, i.e.
      # it wasn't defined) to the day before the next entry's start date.
      # At the same time, distribute each open range's spans evenly.
-     # Lastly, remove the :in_open_range key from spans.
+     # Lastly, remove the :open_range_id key from spans.
      # @param spans [Array<Hash>] spans after being merged from daily_spans.
+     # @param except_dates [Date] dates after "not" entries which were
+     #   rejected from spans.
      # @return [Array<Hash>]
-     def fix_open_ranges!(spans)
+     def fix_open_ranges(spans, except_dates)
        chunked_by_open_range = spans.chunk_while { |a, b|
          a[:dates] && b[:dates] && # in case of planned entry
-           a[:dates].begin == b[:dates].begin &&
-           a[:in_open_range] == b[:in_open_range]
+           a[:open_range_id] == b[:open_range_id]
        }

-       next_chunk_start_date = nil
+       next_start_date = nil
        chunked_by_open_range
-         .reverse_each { |chunk|
-           unless chunk.first[:in_open_range] && chunk.any? { _1[:dates].end == Date.today }
+         .to_a.reverse.map { |chunk|
+           unless chunk.first[:open_range_id]
              # safe nav. in case of planned entry
-             next_chunk_start_date = chunk.first[:dates]&.begin
-             next
+             next_start_date = chunk.first[:dates]&.begin
+             next chunk
+           end
+
+           # Filter out spans that begin after the next chunk's start date.
+           if next_start_date
+             chunk.reject! do |span|
+               span[:dates].begin >= next_start_date
+             end
+           end
+
+           # For the remaining spans (which begin before the next chunk's
+           # start date), bound each end date to that date.
+           chunk.reverse_each do |span|
+             if !next_start_date
+               next_start_date = span[:dates].begin
+               next
+             end
+
+             if span[:dates].end >= next_start_date
+               new_dates = span[:dates].begin..next_start_date.prev_day
+
+               if span[:amount_from_frequency?]
+                 new_to_old_dates_ratio = new_dates.count / span[:dates].count.to_f
+                 span[:amount] = (span[:amount] * new_to_old_dates_ratio).to_i_if_whole
+               end
+
+               span[:dates] = new_dates
+             end
+
+             next_start_date = span[:dates].begin if span[:dates].begin < next_start_date
            end

-           # Set last end date.
-           if chunk.last[:dates].end == Date.today && next_chunk_start_date
-             chunk.last[:dates] = chunk.last[:dates].begin..next_chunk_start_date.prev_day
+           if !next_start_date || chunk.first[:dates].begin < next_start_date
+             next_start_date = chunk.first[:dates].begin
            end
-           next_chunk_start_date = chunk.first[:dates].begin
+
+           next chunk if chunk.map { |span| span[:open_range_id] }.uniq.count == 1 &&
+             chunk.map { |span| span[:dates].begin }.uniq.count > 1

            # Distribute spans across the open date range.
-           total_amount = chunk.sum { |c| c[:amount] }
+           total_amount = chunk.sum { |span| span[:amount] }
            dates = chunk.last[:dates]
            amount_per_day = total_amount / dates.count.to_f

@@ -400,6 +475,7 @@ module Reading
            last_end_date = chunk.last[:dates].end

            span = nil
+           chunk_dup = chunk.dup
            amount_acc = 0
            span_needing_end = nil
            dates.each do |date|
@@ -409,8 +485,8 @@ module Reading
              end

              while amount_acc < amount_per_day
-               break if chunk.empty?
-               span = chunk.shift
+               break if chunk_dup.empty?
+               span = chunk_dup.shift
                amount_acc += span[:amount]

                if amount_acc < amount_per_day
@@ -427,11 +503,11 @@ module Reading
            end

            span[:dates] = span[:dates].begin..last_end_date
-         }

-         spans.each do |span|
-           span.delete(:in_open_range)
-         end
+           chunk
+         }
+         .reverse
+         .flatten
      end

      # Changes amounts taken from progress, from absolute to relative,
@@ -443,15 +519,42 @@ module Reading
      def relativize_amounts_from_progress!(spans)
        amount_acc = 0
        spans.each do |span|
-         if span[:amount_from_progress]
+         if span[:amount_from_progress?]
            span[:amount] -= amount_acc
          end

          amount_acc += span[:amount]
        end
+     end
+
+     # Removes the end date from the last span if it's today, and if it was
+     # written as an open range.
+     # @param spans [Array<Hash>] spans after being merged from daily_spans.
+     # @return [Array<Hash>]
+     def remove_last_end_date_of_today_if_open_range!(spans)
+       if spans.last[:dates] &&
+         spans.last[:dates].end == Date.today &&
+         (spans.last[:open_range_id] || spans.last[:implied_date_range?])
+
+         spans.last[:dates] = spans.last[:dates].begin..
+       end
+     end
+
+     # Removes all keys that shouldn't be in the final item data.
+     # @param spans [Array<Hash>] spans after being merged from daily_spans.
+     # @return [Array<Hash>]
+     def remove_temporary_keys!(spans)
+       temporary_keys = %i[
+         open_range_id
+         amount_from_progress?
+         amount_from_frequency?
+         implied_date_range?
+       ]

        spans.each do |span|
-         span.delete(:amount_from_progress)
+         temporary_keys.each do |key|
+           span.delete(key)
+         end
        end
      end
    end
@@ -15,7 +15,7 @@ module Reading
      # experiences from the History column.
      # @raise [InvalidDateError] if any date is invalid.
      def validate(experiences, history_column: false)
-       if both_date_columns?
+       if both_date_columns? && !history_column
          validate_number_of_start_dates_and_end_dates(experiences)
        end

@@ -131,7 +131,7 @@ module Reading
          end
        end
        .each_cons(2) do |a, b|
-         if a.begin > b.begin || a.end > b.end
+         if a.begin > b.begin || (a.end || Date.today) > (b.end || Date.today)
            raise InvalidDateError, "Dates are not in order"
          end
          if a.cover?(b.begin + 1)
@@ -9,13 +9,16 @@ module Reading
      # Extracts the :progress sub-attribute (percent, pages, or time) from
      # the given hash.
      # @param hash [Hash] any parsed hash that contains progress.
+     # @param no_end_date [Boolean] for start and end dates (as opposed to
+     #   the History column), whether an end date is present.
      # @return [Float, Integer, Item::TimeLength]
-     def self.progress(hash)
+     def self.progress(hash, no_end_date: nil)
        hash[:progress_percent]&.to_f&./(100) ||
          hash[:progress_pages]&.to_i ||
          hash[:progress_time]&.then { Item::TimeLength.parse(_1) } ||
          (0 if hash[:progress_dnf]) ||
          (1.0 if hash[:progress_done]) ||
+         (0.0 if no_end_date) ||
          nil
      end

@@ -91,7 +91,7 @@ module Reading

      private

-     ISBN_REGEX = /(\d{3}[-\s]?)?\d{10}/
+     ISBN_REGEX = /(\d{3}[-\s]?)?(\d{10}|\d{9}X)/
      ASIN_REGEX = /B0[A-Z\d]{8}/
    end
  end
@@ -16,7 +16,11 @@ module Reading
        (\s+|\z)
      )?
      (
-       (?<date>\d{4}/\d\d?/\d\d?)
+       (
+         (?<date>\d{4}/\d\d?/\d\d?)
+         |
+         (?<planned>\?\?)
+       )
        (\s+|\z)
      )?
      (
@@ -119,7 +119,9 @@ module Reading
        .compact
        .max

-     year_ranges = (begin_date.year..end_date.year).flat_map { |year|
+     end_year = [Date.today.year, end_date.year].min
+
+     year_ranges = (begin_date.year..end_year).flat_map { |year|
        beginning_of_year = Date.new(year, 1, 1)
        end_of_year = Date.new(year + 1, 1, 1).prev_day

@@ -191,7 +193,7 @@ module Reading
        groups
      end
    },
-   genre: proc { |items|
+   eachgenre: proc { |items|
      groups = Hash.new { |h, k| h[k] = [] }

      items.each do |item|
@@ -200,6 +202,18 @@ module Reading

      groups.sort.to_h
    },
+   genre: proc { |items|
+     groups = Hash.new { |h, k| h[k] = [] }
+
+     items.each do |item|
+       if item.genres.any?
+         genre_combination = item.genres.sort.join(", ")
+         groups[genre_combination] << item
+       end
+     end
+
+     groups.sort.to_h
+   },
    length: proc { |items|
      boundaries = Config.hash.fetch(:length_group_boundaries)

@@ -50,14 +50,14 @@ module Reading
      # Items) of the given hash of grouped items.
      # @param grouped_items [Hash]
      # @yield [Array<Item>]
-     def self.apply_to_inner_items(grouped_items, &)
+     def self.apply_to_inner_items(grouped_items, &block)
        if grouped_items.values.first.is_a? Array
          grouped_items.transform_values! { |inner_items|
            yield inner_items
          }
        else # It's a Hash, so go one level deeper.
          grouped_items.each do |group_name, grouped|
            apply_to_inner_items(grouped, &block)
          end
        end
      end
@@ -89,7 +89,7 @@ module Reading
        (lengths.sum / lengths.count.to_f).to_i_if_whole
      end
    },
-   :"average_amount" => proc { |items|
+   average_amount: proc { |items|
      total_amount = items.sum { |item|
        item.experiences.sum { |experience|
          experience.spans.sum(&:amount)
@@ -105,6 +105,9 @@ module Reading
        amounts_by_date.values.sum / amounts_by_date.count
      end
    },
+   list_item: proc { |items|
+     items.map { |item| author_and_title(item) }
+   },
    total_item: proc { |items|
      items.count
    },
@@ -119,12 +122,24 @@ module Reading
    },
    top_rating: proc { |items, number_arg|
      items
-       .max_by(number_arg || DEFAULT_NUMBER_ARG, &:rating)
+       .max_by(number_arg || DEFAULT_NUMBER_ARG) { _1.rating || 0 }
        .map { |item| [author_and_title(item), item.rating] }
    },
    top_length: proc { |items, number_arg|
      items
-       .map { |item| [author_and_title(item), item.variants.map(&:length).max] }
+       .map { |item|
+         [
+           author_and_title(item),
+           # Longest length, or if undefined length then longest experience
+           # (code adapted from top_amount below).
+           item.variants.map(&:length).max ||
+             item.experiences.map { |experience|
+               experience.spans.sum { |span|
+                 (span.amount * span.progress).to_i_if_whole
+               }
+             }.max,
+         ]
+       }
        .reject { |_title, length| length.nil? }
        .max_by(number_arg || DEFAULT_NUMBER_ARG) { |_title, length| length }
    },
@@ -152,12 +167,24 @@ module Reading
    },
    bottom_rating: proc { |items, number_arg|
      items
-       .min_by(number_arg || DEFAULT_NUMBER_ARG, &:rating)
+       .min_by(number_arg || DEFAULT_NUMBER_ARG) { _1.rating || 0 }
        .map { |item| [author_and_title(item), item.rating] }
    },
    bottom_length: proc { |items, number_arg|
      items
-       .map { |item| [author_and_title(item), item.variants.map(&:length).max] }
+       .map { |item|
+         [
+           author_and_title(item),
+           # Longest length, or if undefined length then longest experience
+           # (code adapted from bottom_amount below).
+           item.variants.map(&:length).max ||
+             item.experiences.map { |experience|
+               experience.spans.sum { |span|
+                 (span.amount * span.progress).to_i_if_whole
+               }
+             }.max,
+         ]
+       }
        .reject { |_title, length| length.nil? }
        .min_by(number_arg || DEFAULT_NUMBER_ARG) { |_title, length| length }
    },
@@ -183,6 +210,9 @@ module Reading
        speed_hash[:amount] / speed_hash[:days].to_f
      }
    },
+   debug: proc { |items|
+     items
+   },
  }

  ALIASES = {
@@ -190,6 +220,7 @@ module Reading
    average_length: %w[al],
    average_amount: %w[aia ai],
    :"average_daily-amount" => %w[ada ad],
+   list_item: %w[li list],
    total_item: %w[item count],
    total_amount: %w[amount],
    top_rating: %w[tr],
@@ -200,6 +231,7 @@ module Reading
    bottom_length: %w[bl],
    bottom_amount: %w[ba],
    bottom_speed: %w[bs],
+   debug: %w[d],
  }

  REGEXES = ACTIONS.map { |key, _action|
@@ -1,31 +1,9 @@
  module Reading
    module Util
-     class FetchDepthExceededError < StandardError
-     end
-
      # Similar to Array#dig and Hash#dig but raises an error for not found elements.
-     #
-     # More flexible but slightly slower alternative:
-     #   keys.reduce(self) { |a, e| a.fetch(e) }
-     #
-     # See performance comparisons:
-     #   https://fpsvogel.com/posts/2022/ruby-hash-dot-syntax-deep-fetch
      module HashArrayDeepFetch
        def deep_fetch(*keys)
-         case keys.length
-         when 1
-           fetch(keys[0])
-         when 2
-           fetch(keys[0]).fetch(keys[1])
-         when 3
-           fetch(keys[0]).fetch(keys[1]).fetch(keys[2])
-         when 4
-           fetch(keys[0]).fetch(keys[1]).fetch(keys[2]).fetch(keys[3])
-         when 5
-           fetch(keys[0]).fetch(keys[1]).fetch(keys[2]).fetch(keys[3]).fetch(keys[4])
-         else
-           raise FetchDepthExceededError, "#deep_fetch can't fetch that deep!"
-         end
+         keys.reduce(self) { |a, e| a.fetch(e) }
        end

        refine Hash do
@@ -1,3 +1,3 @@
  module Reading
-   VERSION = '0.9.0'
+   VERSION = '0.9.1'
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: reading
  version: !ruby/object:Gem::Version
-   version: 0.9.0
+   version: 0.9.1
  platform: ruby
  authors:
  - Felipe Vogel
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2023-07-15 00:00:00.000000000 Z
+ date: 2024-07-29 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: pastel
@@ -24,6 +24,20 @@ dependencies:
      - - "~>"
        - !ruby/object:Gem::Version
          version: '0.8'
+ - !ruby/object:Gem::Dependency
+   name: amazing_print
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.4'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '1.4'
  - !ruby/object:Gem::Dependency
    name: debug
    requirement: !ruby/object:Gem::Requirement
@@ -94,20 +108,6 @@ dependencies:
      - - "~>"
        - !ruby/object:Gem::Version
          version: '1.0'
- - !ruby/object:Gem::Dependency
-   name: amazing_print
-   requirement: !ruby/object:Gem::Requirement
-     requirements:
-     - - "~>"
-       - !ruby/object:Gem::Version
-         version: '1.4'
-   type: :development
-   prerelease: false
-   version_requirements: !ruby/object:Gem::Requirement
-     requirements:
-     - - "~>"
-       - !ruby/object:Gem::Version
-         version: '1.4'
  - !ruby/object:Gem::Dependency
    name: rubycritic
    requirement: !ruby/object:Gem::Requirement
@@ -189,23 +189,23 @@ metadata:
    allowed_push_host: https://rubygems.org
    homepage_uri: https://github.com/fpsvogel/reading
    source_code_uri: https://github.com/fpsvogel/reading
-   changelog_uri: https://github.com/fpsvogel/reading/blob/master/CHANGELOG.md
+   changelog_uri: https://github.com/fpsvogel/reading/blob/main/CHANGELOG.md
  post_install_message:
  rdoc_options: []
  require_paths:
  - lib
  required_ruby_version: !ruby/object:Gem::Requirement
    requirements:
-   - - ">="
+   - - "~>"
      - !ruby/object:Gem::Version
-       version: 3.0.0
+       version: 3.3.0
  required_rubygems_version: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  requirements: []
- rubygems_version: 3.4.9
+ rubygems_version: 3.5.15
  signing_key:
  specification_version: 4
  summary: Parses a CSV reading log.