appydave-tools 0.69.0 → 0.71.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. checksums.yaml +4 -4
  2. data/.claude/commands/brainstorming-agent.md +227 -0
  3. data/.claude/commands/cli-test.md +251 -0
  4. data/.claude/commands/dev.md +234 -0
  5. data/.claude/commands/po.md +227 -0
  6. data/.claude/commands/progress.md +51 -0
  7. data/.claude/commands/uat.md +321 -0
  8. data/.rubocop.yml +11 -0
  9. data/AGENTS.md +43 -0
  10. data/CHANGELOG.md +24 -0
  11. data/CLAUDE.md +96 -3
  12. data/README.md +15 -0
  13. data/bin/dam +39 -7
  14. data/bin/jump.rb +29 -0
  15. data/bin/subtitle_processor.rb +54 -1
  16. data/bin/zsh_history.rb +846 -0
  17. data/docs/README.md +162 -68
  18. data/docs/architecture/cli/exe-bin-convention.md +434 -0
  19. data/docs/architecture/cli-patterns.md +631 -0
  20. data/docs/architecture/gpt-context/gpt-context-architecture.md +325 -0
  21. data/docs/architecture/gpt-context/gpt-context-implementation-guide.md +419 -0
  22. data/docs/architecture/gpt-context/gpt-context-vision.md +179 -0
  23. data/docs/architecture/testing/testing-patterns.md +762 -0
  24. data/docs/backlog.md +120 -0
  25. data/docs/cli-tests/FR-3-jump-location-tool.md +515 -0
  26. data/docs/dam/batch-s3-listing-requirements.md +780 -0
  27. data/docs/guides/tools/video-file-namer.md +400 -0
  28. data/docs/specs/fr-002-gpt-context-help-system.md +265 -0
  29. data/docs/specs/fr-003-jump-location-tool.md +779 -0
  30. data/docs/specs/zsh-history-tool.md +820 -0
  31. data/docs/uat/FR-3-jump-location-tool.md +741 -0
  32. data/exe/jump +11 -0
  33. data/exe/{subtitle_manager → subtitle_processor} +1 -1
  34. data/exe/zsh_history +11 -0
  35. data/lib/appydave/tools/configuration/openai.rb +1 -1
  36. data/lib/appydave/tools/dam/file_helper.rb +28 -0
  37. data/lib/appydave/tools/dam/project_listing.rb +220 -138
  38. data/lib/appydave/tools/dam/s3_operations.rb +112 -60
  39. data/lib/appydave/tools/dam/ssd_status.rb +226 -0
  40. data/lib/appydave/tools/dam/status.rb +3 -51
  41. data/lib/appydave/tools/jump/cli.rb +561 -0
  42. data/lib/appydave/tools/jump/commands/add.rb +52 -0
  43. data/lib/appydave/tools/jump/commands/base.rb +43 -0
  44. data/lib/appydave/tools/jump/commands/generate.rb +153 -0
  45. data/lib/appydave/tools/jump/commands/remove.rb +58 -0
  46. data/lib/appydave/tools/jump/commands/report.rb +214 -0
  47. data/lib/appydave/tools/jump/commands/update.rb +42 -0
  48. data/lib/appydave/tools/jump/commands/validate.rb +54 -0
  49. data/lib/appydave/tools/jump/config.rb +233 -0
  50. data/lib/appydave/tools/jump/formatters/base.rb +48 -0
  51. data/lib/appydave/tools/jump/formatters/json_formatter.rb +19 -0
  52. data/lib/appydave/tools/jump/formatters/paths_formatter.rb +21 -0
  53. data/lib/appydave/tools/jump/formatters/table_formatter.rb +183 -0
  54. data/lib/appydave/tools/jump/location.rb +134 -0
  55. data/lib/appydave/tools/jump/path_validator.rb +47 -0
  56. data/lib/appydave/tools/jump/search.rb +230 -0
  57. data/lib/appydave/tools/subtitle_processor/transcript.rb +51 -0
  58. data/lib/appydave/tools/version.rb +1 -1
  59. data/lib/appydave/tools/zsh_history/command.rb +37 -0
  60. data/lib/appydave/tools/zsh_history/config.rb +235 -0
  61. data/lib/appydave/tools/zsh_history/filter.rb +184 -0
  62. data/lib/appydave/tools/zsh_history/formatter.rb +75 -0
  63. data/lib/appydave/tools/zsh_history/parser.rb +101 -0
  64. data/lib/appydave/tools.rb +25 -0
  65. data/package.json +1 -1
  66. metadata +53 -4
data/lib/appydave/tools/dam/s3_operations.rb
@@ -152,35 +152,37 @@ module Appydave
 
             s3_path = build_s3_key(relative_path)
 
-            # Check if file already exists with same MD5
-            local_md5 = file_md5(file)
-            s3_md5 = s3_file_md5(s3_path)
-
-            if local_md5 == s3_md5
-              puts " ⏭️ Skipped: #{relative_path} (unchanged)"
-              skipped += 1
-            else
-              # Warn if we're about to overwrite an existing S3 file
-              if s3_md5 && s3_md5 != local_md5
-                puts " ⚠️ Warning: #{relative_path} exists in S3 with different content"
-
-                # Try to get S3 timestamp for comparison
-                s3_file_info = get_s3_file_info(s3_path)
-                if s3_file_info && s3_file_info['LastModified']
-                  s3_time = s3_file_info['LastModified']
-                  local_time = File.mtime(file)
-                  puts " S3: #{s3_time.strftime('%Y-%m-%d %H:%M')} | Local: #{local_time.strftime('%Y-%m-%d %H:%M')}"
-
-                  puts ' ⚠️ S3 file is NEWER than local - you may be overwriting recent changes!' if s3_time > local_time
-                end
-                puts ' Uploading will overwrite S3 version...'
+            # Check if file already exists in S3 and compare
+            s3_info = get_s3_file_info(s3_path)
+
+            if s3_info
+              s3_etag = s3_info['ETag'].gsub('"', '')
+              s3_size = s3_info['Size']
+              match_status = compare_files(local_file: file, s3_etag: s3_etag, s3_size: s3_size)
+
+              if match_status == :synced
+                comparison_method = multipart_etag?(s3_etag) ? 'size match' : 'unchanged'
+                puts " ⏭️ Skipped: #{relative_path} (#{comparison_method})"
+                skipped += 1
+                next
               end
 
-              if upload_file(file, s3_path, dry_run: dry_run)
-                uploaded += 1
-              else
-                failed += 1
-              end
+              # File exists but content differs - warn before overwriting
+              puts " ⚠️ Warning: #{relative_path} exists in S3 with different content"
+              puts ' (multipart upload detected - comparing by size)' if multipart_etag?(s3_etag)
+
+              s3_time = s3_info['LastModified']
+              local_time = File.mtime(file)
+              puts " S3: #{s3_time.strftime('%Y-%m-%d %H:%M')} | Local: #{local_time.strftime('%Y-%m-%d %H:%M')}"
+
+              puts ' ⚠️ S3 file is NEWER than local - you may be overwriting recent changes!' if s3_time > local_time
+              puts ' Uploading will overwrite S3 version...'
+            end
+
+            if upload_file(file, s3_path, dry_run: dry_run)
+              uploaded += 1
+            else
+              failed += 1
             end
           end
           # rubocop:enable Metrics/BlockLength
@@ -208,48 +210,55 @@ module Appydave
             return
           end
 
-          puts "📦 Downloading #{s3_files.size} file(s) from S3 to #{project_id}/s3-staging/..."
+          total_size = s3_files.sum { |f| f['Size'] || 0 }
+          puts "📦 Downloading #{s3_files.size} file(s) (#{file_size_human(total_size)}) from S3 to #{project_id}/s3-staging/..."
           puts ''
 
           downloaded = 0
           skipped = 0
           failed = 0
 
+          # rubocop:disable Metrics/BlockLength
           s3_files.each do |s3_file|
             key = s3_file['Key']
             relative_path = extract_relative_path(key)
             local_file = File.join(staging_dir, relative_path)
 
-            # Check if file already exists with same MD5
-            s3_md5 = s3_file['ETag'].gsub('"', '')
-            local_md5 = File.exist?(local_file) ? file_md5(local_file) : nil
+            # Check if file already exists and compare
+            s3_etag = s3_file['ETag'].gsub('"', '')
+            s3_size = s3_file['Size']
 
-            if local_md5 == s3_md5
-              puts " ⏭️ Skipped: #{relative_path} (unchanged)"
-              skipped += 1
-            else
-              # Warn if we're about to overwrite an existing local file
-              if local_md5 && local_md5 != s3_md5
-                puts " ⚠️ Warning: #{relative_path} exists locally with different content"
-
-                # Compare timestamps
-                if s3_file['LastModified'] && File.exist?(local_file)
-                  s3_time = s3_file['LastModified']
-                  local_time = File.mtime(local_file)
-                  puts " S3: #{s3_time.strftime('%Y-%m-%d %H:%M')} | Local: #{local_time.strftime('%Y-%m-%d %H:%M')}"
-
-                  puts ' ⚠️ Local file is NEWER than S3 - you may be overwriting recent changes!' if local_time > s3_time
-                end
-                puts ' Downloading will overwrite local version...'
+            if File.exist?(local_file)
+              match_status = compare_files(local_file: local_file, s3_etag: s3_etag, s3_size: s3_size)
+
+              if match_status == :synced
+                comparison_method = multipart_etag?(s3_etag) ? 'size match' : 'unchanged'
+                puts " ⏭️ Skipped: #{relative_path} (#{comparison_method})"
+                skipped += 1
+                next
              end
 
-              if download_file(key, local_file, dry_run: dry_run)
-                downloaded += 1
-              else
-                failed += 1
+              # File exists but content differs - warn before overwriting
+              puts " ⚠️ Warning: #{relative_path} exists locally with different content"
+              puts ' (multipart upload detected - comparing by size)' if multipart_etag?(s3_etag)
+
+              if s3_file['LastModified']
+                s3_time = s3_file['LastModified']
+                local_time = File.mtime(local_file)
+                puts " S3: #{s3_time.strftime('%Y-%m-%d %H:%M')} | Local: #{local_time.strftime('%Y-%m-%d %H:%M')}"
+
+                puts ' ⚠️ Local file is NEWER than S3 - you may be overwriting recent changes!' if local_time > s3_time
              end
+              puts ' Downloading will overwrite local version...'
+            end
+
+            if download_file(key, local_file, dry_run: dry_run)
+              downloaded += 1
+            else
+              failed += 1
            end
          end
+          # rubocop:enable Metrics/BlockLength
          puts ''
          puts '✅ Download complete!'
          puts " Downloaded: #{downloaded}, Skipped: #{skipped}, Failed: #{failed}"
@@ -322,11 +331,12 @@ module Appydave
             total_s3_size += s3_size
             total_local_size += local_size
 
-            local_md5 = file_md5(local_file)
-            s3_md5 = s3_file['ETag'].gsub('"', '')
+            s3_etag = s3_file['ETag'].gsub('"', '')
+            match_status = compare_files(local_file: local_file, s3_etag: s3_etag, s3_size: s3_size)
 
-            if local_md5 == s3_md5
-              puts " ✓ #{relative_path} (#{file_size_human(s3_size)}) [synced]"
+            if match_status == :synced
+              status_label = multipart_etag?(s3_etag) ? 'synced*' : 'synced'
+              puts " ✓ #{relative_path} (#{file_size_human(s3_size)}) [#{status_label}]"
             else
               puts " ⚠️ #{relative_path} (#{file_size_human(s3_size)}) [modified]"
             end
@@ -526,10 +536,11 @@ module Appydave
             s3_file = s3_files_map[relative_path]
 
             if s3_file
-              # Compare MD5
-              local_md5 = file_md5(local_file)
-              s3_md5 = s3_file['ETag'].gsub('"', '')
-              needs_upload = true if local_md5 != s3_md5
+              # Compare using multipart-aware comparison
+              s3_etag = s3_file['ETag'].gsub('"', '')
+              s3_size = s3_file['Size']
+              match_status = compare_files(local_file: local_file, s3_etag: s3_etag, s3_size: s3_size)
+              needs_upload = true if match_status != :synced
             else
               # Local file not in S3
               needs_upload = true
@@ -623,6 +634,47 @@ module Appydave
           nil
         end
 
+        # Check if an S3 ETag is from a multipart upload
+        # Multipart ETags have format: "hash-partcount" (e.g., "d41d8cd98f00b204e9800998ecf8427e-5")
+        def multipart_etag?(etag)
+          return false if etag.nil?
+
+          etag.include?('-')
+        end
+
+        # Compare local file with S3 file, handling multipart ETags
+        # Returns: :synced, :modified, or :unknown
+        # For multipart uploads, falls back to size comparison since MD5 won't match
+        def compare_files(local_file:, s3_etag:, s3_size:)
+          return :unknown unless File.exist?(local_file)
+          return :unknown if s3_etag.nil?
+
+          local_size = File.size(local_file)
+
+          if multipart_etag?(s3_etag)
+            # Multipart upload - MD5 comparison won't work, use size
+            # Size match is a reasonable proxy for "unchanged" in this context
+            local_size == s3_size ? :synced : :modified
+          else
+            # Standard upload - use MD5 comparison
+            local_md5 = file_md5(local_file)
+            return :unknown if local_md5.nil?
+
+            local_md5 == s3_etag ? :synced : :modified
+          end
+        end
+
+        # Get S3 file size from path (for upload comparison)
+        def s3_file_size(s3_path)
+          response = s3_client.head_object(
+            bucket: brand_info.aws.s3_bucket,
+            key: s3_path
+          )
+          response.content_length
+        rescue Aws::S3::Errors::NotFound, Aws::S3::Errors::ServiceError
+          nil
+        end
+
         # Upload file to S3
         def upload_file(local_file, s3_path, dry_run: false)
           if dry_run
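
For readers skimming the diff: the rule the new compare_files/multipart_etag? helpers encode is that a single-part S3 ETag is the object's MD5 digest, while a multipart ETag carries a "-<part count>" suffix and can only be checked by size. Below is a standalone Ruby sketch of that rule, not gem code; the file name and ETag values are made-up examples.

# Standalone sketch of the multipart-aware comparison rule above (illustrative only).
require 'digest'

def sketch_compare(local_path, s3_etag, s3_size)
  return :unknown unless File.exist?(local_path) && s3_etag

  if s3_etag.include?('-') # multipart marker, e.g. "d41d8cd98f00b204e9800998ecf8427e-5"
    File.size(local_path) == s3_size ? :synced : :modified # size is the only usable check
  else
    Digest::MD5.file(local_path).hexdigest == s3_etag ? :synced : :modified
  end
end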
data/lib/appydave/tools/dam/ssd_status.rb
@@ -0,0 +1,226 @@
+# frozen_string_literal: true
+
+module Appydave
+  module Tools
+    module Dam
+      # Show SSD mount status for all brands
+      class SsdStatus
+        attr_reader :brands_config
+
+        def initialize(brands_config: nil)
+          if brands_config
+            @brands_config = brands_config
+          else
+            Appydave::Tools::Configuration::Config.configure
+            @brands_config = Appydave::Tools::Configuration::Config.brands
+          end
+        end
+
+        # Show SSD status for all brands
+        def show_all
+          results = collect_brand_statuses
+
+          # Identify unique volumes
+          volumes = results.select { |r| r[:configured] }
+                           .map { |r| extract_volume_name(r[:ssd_path]) }
+                           .compact
+                           .uniq
+
+          if volumes.empty?
+            puts '⚠️ No SSD volumes configured'
+            return
+          end
+
+          # Show simple mount status for each volume
+          volumes.each do |volume|
+            volume_path = "/Volumes/#{volume}"
+            if Dir.exist?(volume_path)
+              puts "✅ #{volume} is MOUNTED"
+            else
+              puts "❌ #{volume} is NOT MOUNTED"
+            end
+          end
+
+          puts ''
+          puts '| Brand          | Path                                   | Status       |'
+          puts '|----------------|----------------------------------------|--------------|'
+          results.each do |result|
+            display_brand_row(result)
+          end
+        end
+
+        # Show SSD status for a specific brand
+        def show(brand_key)
+          brand_info = @brands_config.get_brand(brand_key)
+          ssd_path = brand_info.locations.ssd_backup
+
+          puts "💾 SSD Status: #{brand_info.name} (#{brand_info.key})"
+          puts ''
+
+          if ssd_path.nil? || ssd_path.empty? || ssd_path == 'NOT-SET'
+            puts '⚠️ SSD backup not configured for this brand'
+            puts ''
+            puts 'To configure, add ssd_backup to brands.json:'
+            puts ''
+            puts '  "locations": {'
+            puts '    "video_projects": "/path/to/projects",'
+            puts '    "ssd_backup": "/Volumes/T7/youtube-PUBLISHED/appydave"'
+            puts '  }'
+            return
+          end
+
+          puts "Path: #{ssd_path}"
+          puts ''
+
+          if Dir.exist?(ssd_path)
+            display_mounted_details(brand_info, ssd_path)
+          else
+            display_unmounted_details(ssd_path)
+          end
+        end
+
+        private
+
+        def collect_brand_statuses
+          @brands_config.brands.map do |brand_info|
+            ssd_path = brand_info.locations.ssd_backup
+            configured = ssd_path && !ssd_path.empty? && ssd_path != 'NOT-SET'
+            mounted = configured && Dir.exist?(ssd_path)
+
+            # Check if SSD volume is mounted but folder doesn't exist
+            volume_mounted = false
+            if configured && !mounted
+              volume_name = extract_volume_name(ssd_path)
+              volume_mounted = volume_name && Dir.exist?("/Volumes/#{volume_name}")
+            end
+
+            {
+              brand: brand_info,
+              ssd_path: ssd_path,
+              configured: configured,
+              mounted: mounted,
+              volume_mounted: volume_mounted
+            }
+          end
+        end
+
+        def display_brand_row(result)
+          brand_col = result[:brand].key.ljust(14)
+
+          if result[:configured]
+            path_col = truncate_path(result[:ssd_path], 38).ljust(38)
+            status_col = if result[:mounted]
+                           '✅ Ready'
+                         elsif result[:volume_mounted]
+                           '⚠️ No folder'
+                         else
+                           '❌ Not mounted'
+                         end
+          else
+            path_col = '(not configured)'.ljust(38)
+            status_col = '⚠️ N/A'
+          end
+
+          puts "| #{brand_col} | #{path_col} | #{status_col.ljust(12)} |"
+        end
+
+        def display_mounted_details(_brand_info, ssd_path)
+          puts '✅ SSD is mounted'
+          puts ''
+
+          # Count projects on SSD
+          project_dirs = Dir.glob(File.join(ssd_path, '*')).select { |f| File.directory?(f) }
+          project_count = project_dirs.size
+
+          # Calculate total size (quick estimate from directory count)
+          puts "Projects on SSD: #{project_count}"
+
+          # Show disk space info if available
+          show_disk_space(ssd_path)
+
+          # Show recent projects
+          return unless project_count.positive?
+
+          puts ''
+          puts 'Recent projects (last 5 modified):'
+          recent = project_dirs.sort_by { |d| File.mtime(d) }.reverse.first(5)
+          recent.each do |dir|
+            name = File.basename(dir)
+            age = FileHelper.format_age(File.mtime(dir))
+            puts " #{name} (#{age} ago)"
+          end
+        end
+
+        def display_unmounted_details(ssd_path)
+          # Try to identify the volume
+          volume_name = extract_volume_name(ssd_path)
+          volume_path = "/Volumes/#{volume_name}" if volume_name
+
+          if volume_name && Dir.exist?(volume_path)
+            # SSD is mounted, but specific folder doesn't exist
+            puts '⚠️ SSD is mounted but backup folder does NOT exist'
+            puts ''
+            puts "Volume '#{volume_name}' is connected, but the expected backup folder is missing."
+            puts ''
+            puts 'To create the backup folder:'
+            puts " mkdir -p #{ssd_path}"
+            puts ''
+            puts 'Or update brands.json with the correct path.'
+          else
+            # SSD is not mounted at all
+            puts '❌ SSD is NOT mounted'
+            puts ''
+            puts 'Expected path does not exist.'
+            puts ''
+
+            if volume_name
+              puts "Volume expected: #{volume_name}"
+              puts ''
+              puts 'To mount:'
+              puts " 1. Connect the '#{volume_name}' drive"
+              puts ' 2. Verify it appears in /Volumes/'
+              puts " 3. Run: ls #{ssd_path}"
+            end
+          end
+        end
+
+        def show_disk_space(ssd_path)
+          # Use df to get disk space info
+          output = `df -h "#{ssd_path}" 2>/dev/null`
+          return if output.empty?
+
+          lines = output.lines
+          return unless lines.size >= 2
+
+          # Parse df output (header + data line)
+          parts = lines[1].split
+          return unless parts.size >= 4
+
+          size = parts[1]
+          used = parts[2]
+          avail = parts[3]
+          capacity = parts[4] if parts.size >= 5
+
+          puts ''
+          puts 'Disk Space:'
+          puts " Total: #{size}"
+          puts " Used: #{used} (#{capacity})" if capacity
+          puts " Available: #{avail}"
+        end
+
+        def extract_volume_name(path)
+          # Extract volume name from /Volumes/VolumeName/...
+          match = path.match(%r{^/Volumes/([^/]+)})
+          match[1] if match
+        end
+
+        def truncate_path(path, max_length)
+          return path if path.nil? || path.length <= max_length
+
+          # Keep the end of the path (more useful)
+          "...#{path[-(max_length - 3)..]}"
+        end
+      end
+    end
+  end
+end
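
A hypothetical usage sketch for the new class (assumes brands.json is configured; the 'appydave' brand key is illustrative, not taken from the diff):

require 'appydave/tools'

# Instantiating without arguments loads brand config via Config.configure
ssd = Appydave::Tools::Dam::SsdStatus.new
ssd.show_all         # volume mount summary plus per-brand table
ssd.show('appydave') # detailed view for a single brand key (illustrative key)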
data/lib/appydave/tools/dam/status.rb
@@ -42,7 +42,7 @@ module Appydave
         def show_project_status
           project_size = calculate_project_size
           last_modified = File.mtime(project_path)
-          age = format_age(last_modified)
+          age = FileHelper.format_age(last_modified)
 
           puts "📊 Status: v-#{brand}/#{File.basename(project_path)} (#{format_size(project_size)})"
           puts " Last modified: #{age} ago"
@@ -155,7 +155,7 @@ module Appydave
 
           if Dir.exist?(ssd_full_path)
             last_modified = File.mtime(ssd_full_path)
-            age = format_age(last_modified)
+            age = FileHelper.format_age(last_modified)
             puts " Last synced: #{age} ago"
           end
         end
@@ -163,29 +163,6 @@ module Appydave
           puts ''
         end
 
-        def show_git_status
-          puts 'Git:'
-
-          status = git_status_info
-
-          puts " 🌿 Branch: #{status[:branch]}"
-          puts " 📡 Remote: #{status[:remote]}" if status[:remote]
-
-          if status[:modified_count].positive? || status[:untracked_count].positive?
-            puts " ↕️ Status: #{status[:modified_count]} modified, #{status[:untracked_count]} untracked"
-          else
-            puts ' ↕️ Status: Clean working directory'
-          end
-
-          if status[:ahead].positive? || status[:behind].positive?
-            puts " 🔄 Sync: #{sync_status_text(status[:ahead], status[:behind])}"
-          else
-            puts ' 🔄 Sync: Up to date'
-          end
-
-          puts ''
-        end
-
         def show_brand_git_status
           status = git_status_info
 
@@ -340,31 +317,6 @@ module Appydave
           FileHelper.format_size(bytes)
         end
 
-        def format_age(time)
-          return 'N/A' if time.nil?
-
-          seconds = Time.now - time
-          return 'just now' if seconds < 60
-
-          minutes = seconds / 60
-          return "#{minutes.round}m" if minutes < 60
-
-          hours = minutes / 60
-          return "#{hours.round}h" if hours < 24
-
-          days = hours / 24
-          return "#{days.round}d" if days < 7
-
-          weeks = days / 7
-          return "#{weeks.round}w" if weeks < 4
-
-          months = days / 30
-          return "#{months.round}mo" if months < 12
-
-          years = days / 365
-          "#{years.round}y"
-        end
-
         def calculate_manifest_age(last_updated_str)
           last_updated = Time.parse(last_updated_str)
           Time.now - last_updated
@@ -468,7 +420,7 @@ module Appydave
 
           return unless latest_sync
 
-          age = format_age(latest_sync)
+          age = FileHelper.format_age(latest_sync)
           puts " Last S3 sync: #{age} ago"
         end
       end
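
The format_age helper removed from status.rb above now lives in FileHelper (see data/lib/appydave/tools/dam/file_helper.rb in the file list). Assuming the moved logic matches what was removed, a call would behave like this sketch (illustrative only):

require 'appydave/tools'

# A timestamp 3600 seconds old falls into the hours bucket of the removed logic
Appydave::Tools::Dam::FileHelper.format_age(Time.now - 3600) # => "1h" (assumed)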