appydave-tools 0.70.0 → 0.71.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.claude/commands/brainstorming-agent.md +227 -0
- data/.claude/commands/cli-test.md +251 -0
- data/.claude/commands/dev.md +234 -0
- data/.claude/commands/po.md +227 -0
- data/.claude/commands/progress.md +51 -0
- data/.claude/commands/uat.md +321 -0
- data/.rubocop.yml +9 -0
- data/AGENTS.md +43 -0
- data/CHANGELOG.md +12 -0
- data/CLAUDE.md +26 -3
- data/README.md +15 -0
- data/bin/dam +21 -1
- data/bin/jump.rb +29 -0
- data/bin/subtitle_processor.rb +54 -1
- data/bin/zsh_history.rb +846 -0
- data/docs/README.md +162 -69
- data/docs/architecture/cli/exe-bin-convention.md +434 -0
- data/docs/architecture/cli-patterns.md +631 -0
- data/docs/architecture/gpt-context/gpt-context-architecture.md +325 -0
- data/docs/architecture/gpt-context/gpt-context-implementation-guide.md +419 -0
- data/docs/architecture/gpt-context/gpt-context-vision.md +179 -0
- data/docs/architecture/testing/testing-patterns.md +762 -0
- data/docs/backlog.md +120 -0
- data/docs/cli-tests/FR-3-jump-location-tool.md +515 -0
- data/docs/specs/fr-002-gpt-context-help-system.md +265 -0
- data/docs/specs/fr-003-jump-location-tool.md +779 -0
- data/docs/specs/zsh-history-tool.md +820 -0
- data/docs/uat/FR-3-jump-location-tool.md +741 -0
- data/exe/jump +11 -0
- data/exe/{subtitle_manager → subtitle_processor} +1 -1
- data/exe/zsh_history +11 -0
- data/lib/appydave/tools/configuration/openai.rb +1 -1
- data/lib/appydave/tools/dam/file_helper.rb +28 -0
- data/lib/appydave/tools/dam/project_listing.rb +4 -30
- data/lib/appydave/tools/dam/s3_operations.rb +2 -1
- data/lib/appydave/tools/dam/ssd_status.rb +226 -0
- data/lib/appydave/tools/dam/status.rb +3 -51
- data/lib/appydave/tools/jump/cli.rb +561 -0
- data/lib/appydave/tools/jump/commands/add.rb +52 -0
- data/lib/appydave/tools/jump/commands/base.rb +43 -0
- data/lib/appydave/tools/jump/commands/generate.rb +153 -0
- data/lib/appydave/tools/jump/commands/remove.rb +58 -0
- data/lib/appydave/tools/jump/commands/report.rb +214 -0
- data/lib/appydave/tools/jump/commands/update.rb +42 -0
- data/lib/appydave/tools/jump/commands/validate.rb +54 -0
- data/lib/appydave/tools/jump/config.rb +233 -0
- data/lib/appydave/tools/jump/formatters/base.rb +48 -0
- data/lib/appydave/tools/jump/formatters/json_formatter.rb +19 -0
- data/lib/appydave/tools/jump/formatters/paths_formatter.rb +21 -0
- data/lib/appydave/tools/jump/formatters/table_formatter.rb +183 -0
- data/lib/appydave/tools/jump/location.rb +134 -0
- data/lib/appydave/tools/jump/path_validator.rb +47 -0
- data/lib/appydave/tools/jump/search.rb +230 -0
- data/lib/appydave/tools/subtitle_processor/transcript.rb +51 -0
- data/lib/appydave/tools/version.rb +1 -1
- data/lib/appydave/tools/zsh_history/command.rb +37 -0
- data/lib/appydave/tools/zsh_history/config.rb +235 -0
- data/lib/appydave/tools/zsh_history/filter.rb +184 -0
- data/lib/appydave/tools/zsh_history/formatter.rb +75 -0
- data/lib/appydave/tools/zsh_history/parser.rb +101 -0
- data/lib/appydave/tools.rb +25 -0
- data/package.json +1 -1
- metadata +51 -4
data/exe/jump
ADDED
@@ -0,0 +1,11 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+$LOAD_PATH.unshift(File.expand_path('../lib', __dir__))
+
+require 'appydave/tools'
+
+# Set $PROGRAM_NAME to the bin/jump.rb file so the guard passes
+$PROGRAM_NAME = File.expand_path('../bin/jump.rb', __dir__)
+
+load $PROGRAM_NAME
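The new `exe/` wrappers delegate to the existing `bin/` scripts instead of duplicating them: assigning `$PROGRAM_NAME` before `load` means the loaded script's "run only when executed directly" guard still fires. A minimal sketch of the kind of guard this shim satisfies; the actual contents of `bin/jump.rb` are not shown in this diff, and the entry-point call is an assumption:

```ruby
#!/usr/bin/env ruby
# frozen_string_literal: true

# Hypothetical tail of bin/jump.rb, for illustration only (the real file is not in this diff).
# When exe/jump sets $PROGRAM_NAME to this file's absolute path and then `load`s it,
# the comparison below is true, so the CLI runs exactly as if the bin script were invoked directly.
if $PROGRAM_NAME == __FILE__
  Appydave::Tools::Jump::CLI.new.run # assumed entry point; jump/cli.rb is added in this release
end
```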
data/exe/zsh_history
ADDED
@@ -0,0 +1,11 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+$LOAD_PATH.unshift(File.expand_path('../lib', __dir__))
+
+require 'appydave/tools'
+
+# Set $PROGRAM_NAME to the bin file so the guard passes
+$PROGRAM_NAME = File.expand_path('../bin/zsh_history.rb', __dir__)
+
+load $PROGRAM_NAME
data/lib/appydave/tools/configuration/openai.rb
@@ -8,7 +8,7 @@ OpenAI.configure do |config|
   tools_enabled = ENV.fetch('TOOLS_ENABLED', 'false')
 
   if tools_enabled == 'true'
-    puts 'Tools are enabled, OpenAI will allow net connections'
+    # puts 'Tools are enabled, OpenAI will allow net connections'
     config.access_token = ENV.fetch('OPENAI_ACCESS_TOKEN')
     config.organization_id = ENV.fetch('OPENAI_ORGANIZATION_ID', nil)
   end
data/lib/appydave/tools/dam/file_helper.rb
@@ -37,6 +37,34 @@ module Appydave
 
         format('%<size>.1f %<unit>s', size: bytes.to_f / (1024**exp), unit: units[exp])
       end
+
+      # Format time as relative age (e.g., "3d", "2w", "1mo")
+      # @param time [Time, nil] Time to format
+      # @return [String] Relative age string
+      def format_age(time)
+        return 'N/A' if time.nil?
+
+        seconds = Time.now - time
+        return 'just now' if seconds < 60
+
+        minutes = seconds / 60
+        return "#{minutes.round}m" if minutes < 60
+
+        hours = minutes / 60
+        return "#{hours.round}h" if hours < 24
+
+        days = hours / 24
+        return "#{days.round}d" if days < 7
+
+        weeks = days / 7
+        return "#{weeks.round}w" if weeks < 4
+
+        months = days / 30
+        return "#{months.round}mo" if months < 12
+
+        years = days / 365
+        "#{years.round}y"
+      end
     end
   end
 end
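For reference, a quick sketch of how the consolidated helper behaves, assuming `FileHelper` exposes `format_age` at the module level (it is called as `FileHelper.format_age` elsewhere in this diff); the thresholds follow the method body above:

```ruby
require 'appydave/tools'

helper = Appydave::Tools::Dam::FileHelper # assumed constant path, mirroring lib/appydave/tools/dam/file_helper.rb

helper.format_age(nil)                      # => "N/A"
helper.format_age(Time.now - 30)            # => "just now"  (under 60 seconds)
helper.format_age(Time.now - (90 * 60))     # => "2h"        (1.5 hours rounds to 2)
helper.format_age(Time.now - (3 * 86_400))  # => "3d"
helper.format_age(Time.now - (45 * 86_400)) # => "2mo"       (45 / 30 = 1.5 months, rounds to 2)
```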
data/lib/appydave/tools/dam/project_listing.rb
@@ -217,8 +217,8 @@ module Appydave
           age_display = data[:stale] ? "#{data[:age]} ⚠️" : data[:age]
 
           if s3
-            s3_upload = data[:s3_last_upload] ? format_age(data[:s3_last_upload]) : 'N/A'
-            s3_download = data[:s3_last_download] ? format_age(data[:s3_last_download]) : 'N/A'
+            s3_upload = data[:s3_last_upload] ? FileHelper.format_age(data[:s3_last_upload]) : 'N/A'
+            s3_download = data[:s3_last_download] ? FileHelper.format_age(data[:s3_last_download]) : 'N/A'
 
             puts format(
               '%-45s %12s %15s %-15s %-12s %-65s %-18s %-18s %-30s %-15s %-15s',
@@ -338,7 +338,7 @@ module Appydave
             path: project_path,
             size: size,
             modified: modified,
-            age: format_age(modified),
+            age: FileHelper.format_age(modified),
             stale: stale?(modified)
           }
         end
@@ -570,7 +570,7 @@ module Appydave
             path: project_path,
             size: size,
             modified: modified,
-            age: format_age(modified),
+            age: FileHelper.format_age(modified),
             stale: stale?(modified),
             git_status: git_status,
             s3_sync: s3_sync
@@ -652,32 +652,6 @@ module Appydave
           time.strftime('%Y-%m-%d %H:%M')
         end
 
-        # Format age as relative time (e.g., "3 days", "2 weeks")
-        def self.format_age(time)
-          return 'N/A' if time.nil?
-
-          seconds = Time.now - time
-          return 'just now' if seconds < 60
-
-          minutes = seconds / 60
-          return "#{minutes.round}m" if minutes < 60
-
-          hours = minutes / 60
-          return "#{hours.round}h" if hours < 24
-
-          days = hours / 24
-          return "#{days.round}d" if days < 7
-
-          weeks = days / 7
-          return "#{weeks.round}w" if weeks < 4
-
-          months = days / 30
-          return "#{months.round}mo" if months < 12
-
-          years = days / 365
-          "#{years.round}y"
-        end
-
         # Check if project is stale (>90 days old)
         def self.stale?(time)
           return false if time.nil?
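To make the call-site change concrete, this is roughly the shape of a single listing entry after the refactor; the field names come from the hunks above, but the values and path are illustrative only:

```ruby
# Illustrative entry; not taken from the diff.
modified = Time.now - (5 * 86_400)
entry = {
  path: '/Users/example/video-projects/a01-intro',             # hypothetical project path
  size: 3_221_225_472,                                          # bytes
  modified: modified,
  age: Appydave::Tools::Dam::FileHelper.format_age(modified),   # => "5d" (previously a private format_age)
  stale: false                                                  # stale?(modified) against the 90-day threshold
}
```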
data/lib/appydave/tools/dam/s3_operations.rb
@@ -210,7 +210,8 @@ module Appydave
           return
         end
 
-
+        total_size = s3_files.sum { |f| f['Size'] || 0 }
+        puts "📦 Downloading #{s3_files.size} file(s) (#{file_size_human(total_size)}) from S3 to #{project_id}/s3-staging/..."
         puts ''
 
         downloaded = 0
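The new progress line sums the listed object sizes before downloading starts. A small sketch of that aggregation, assuming each entry is a hash carrying a `'Size'` key in bytes (the key name comes from the hunk; the surrounding listing shape is assumed):

```ruby
# Hypothetical listing entries; only the 'Size' key matters for the total.
s3_files = [
  { 'Key' => 'a01-intro/final.mp4', 'Size' => 1_572_864_000 },
  { 'Key' => 'a01-intro/thumb.png', 'Size' => 245_760 },
  { 'Key' => 'a01-intro/notes.md' } # a missing Size falls back to 0
]

total_size = s3_files.sum { |f| f['Size'] || 0 } # => 1_573_109_760
```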
data/lib/appydave/tools/dam/ssd_status.rb
ADDED
@@ -0,0 +1,226 @@
+# frozen_string_literal: true
+
+module Appydave
+  module Tools
+    module Dam
+      # Show SSD mount status for all brands
+      class SsdStatus
+        attr_reader :brands_config
+
+        def initialize(brands_config: nil)
+          if brands_config
+            @brands_config = brands_config
+          else
+            Appydave::Tools::Configuration::Config.configure
+            @brands_config = Appydave::Tools::Configuration::Config.brands
+          end
+        end
+
+        # Show SSD status for all brands
+        def show_all
+          results = collect_brand_statuses
+
+          # Identify unique volumes
+          volumes = results.select { |r| r[:configured] }
+                           .map { |r| extract_volume_name(r[:ssd_path]) }
+                           .compact
+                           .uniq
+
+          if volumes.empty?
+            puts '⚠️ No SSD volumes configured'
+            return
+          end
+
+          # Show simple mount status for each volume
+          volumes.each do |volume|
+            volume_path = "/Volumes/#{volume}"
+            if Dir.exist?(volume_path)
+              puts "✅ #{volume} is MOUNTED"
+            else
+              puts "❌ #{volume} is NOT MOUNTED"
+            end
+          end
+
+          puts ''
+          puts '| Brand          | Path                                   | Status       |'
+          puts '|----------------|----------------------------------------|--------------|'
+          results.each do |result|
+            display_brand_row(result)
+          end
+        end
+
+        # Show SSD status for a specific brand
+        def show(brand_key)
+          brand_info = @brands_config.get_brand(brand_key)
+          ssd_path = brand_info.locations.ssd_backup
+
+          puts "💾 SSD Status: #{brand_info.name} (#{brand_info.key})"
+          puts ''
+
+          if ssd_path.nil? || ssd_path.empty? || ssd_path == 'NOT-SET'
+            puts '⚠️ SSD backup not configured for this brand'
+            puts ''
+            puts 'To configure, add ssd_backup to brands.json:'
+            puts ''
+            puts '  "locations": {'
+            puts '    "video_projects": "/path/to/projects",'
+            puts '    "ssd_backup": "/Volumes/T7/youtube-PUBLISHED/appydave"'
+            puts '  }'
+            return
+          end
+
+          puts "Path: #{ssd_path}"
+          puts ''
+
+          if Dir.exist?(ssd_path)
+            display_mounted_details(brand_info, ssd_path)
+          else
+            display_unmounted_details(ssd_path)
+          end
+        end
+
+        private
+
+        def collect_brand_statuses
+          @brands_config.brands.map do |brand_info|
+            ssd_path = brand_info.locations.ssd_backup
+            configured = ssd_path && !ssd_path.empty? && ssd_path != 'NOT-SET'
+            mounted = configured && Dir.exist?(ssd_path)
+
+            # Check if SSD volume is mounted but folder doesn't exist
+            volume_mounted = false
+            if configured && !mounted
+              volume_name = extract_volume_name(ssd_path)
+              volume_mounted = volume_name && Dir.exist?("/Volumes/#{volume_name}")
+            end
+
+            {
+              brand: brand_info,
+              ssd_path: ssd_path,
+              configured: configured,
+              mounted: mounted,
+              volume_mounted: volume_mounted
+            }
+          end
+        end
+
+        def display_brand_row(result)
+          brand_col = result[:brand].key.ljust(14)
+
+          if result[:configured]
+            path_col = truncate_path(result[:ssd_path], 38).ljust(38)
+            status_col = if result[:mounted]
+                           '✅ Ready'
+                         elsif result[:volume_mounted]
+                           '⚠️ No folder'
+                         else
+                           '❌ Not mounted'
+                         end
+          else
+            path_col = '(not configured)'.ljust(38)
+            status_col = '⚠️ N/A'
+          end
+
+          puts "| #{brand_col} | #{path_col} | #{status_col.ljust(12)} |"
+        end
+
+        def display_mounted_details(_brand_info, ssd_path)
+          puts '✅ SSD is mounted'
+          puts ''
+
+          # Count projects on SSD
+          project_dirs = Dir.glob(File.join(ssd_path, '*')).select { |f| File.directory?(f) }
+          project_count = project_dirs.size
+
+          # Calculate total size (quick estimate from directory count)
+          puts "Projects on SSD: #{project_count}"
+
+          # Show disk space info if available
+          show_disk_space(ssd_path)
+
+          # Show recent projects
+          return unless project_count.positive?
+
+          puts ''
+          puts 'Recent projects (last 5 modified):'
+          recent = project_dirs.sort_by { |d| File.mtime(d) }.reverse.first(5)
+          recent.each do |dir|
+            name = File.basename(dir)
+            age = FileHelper.format_age(File.mtime(dir))
+            puts "  #{name} (#{age} ago)"
+          end
+        end
+
+        def display_unmounted_details(ssd_path)
+          # Try to identify the volume
+          volume_name = extract_volume_name(ssd_path)
+          volume_path = "/Volumes/#{volume_name}" if volume_name
+
+          if volume_name && Dir.exist?(volume_path)
+            # SSD is mounted, but specific folder doesn't exist
+            puts '⚠️ SSD is mounted but backup folder does NOT exist'
+            puts ''
+            puts "Volume '#{volume_name}' is connected, but the expected backup folder is missing."
+            puts ''
+            puts 'To create the backup folder:'
+            puts "  mkdir -p #{ssd_path}"
+            puts ''
+            puts 'Or update brands.json with the correct path.'
+          else
+            # SSD is not mounted at all
+            puts '❌ SSD is NOT mounted'
+            puts ''
+            puts 'Expected path does not exist.'
+            puts ''
+
+            if volume_name
+              puts "Volume expected: #{volume_name}"
+              puts ''
+              puts 'To mount:'
+              puts "  1. Connect the '#{volume_name}' drive"
+              puts '  2. Verify it appears in /Volumes/'
+              puts "  3. Run: ls #{ssd_path}"
+            end
+          end
+        end
+
+        def show_disk_space(ssd_path)
+          # Use df to get disk space info
+          output = `df -h "#{ssd_path}" 2>/dev/null`
+          return if output.empty?
+
+          lines = output.lines
+          return unless lines.size >= 2
+
+          # Parse df output (header + data line)
+          parts = lines[1].split
+          return unless parts.size >= 4
+
+          size = parts[1]
+          used = parts[2]
+          avail = parts[3]
+          capacity = parts[4] if parts.size >= 5
+
+          puts ''
+          puts 'Disk Space:'
+          puts "  Total: #{size}"
+          puts "  Used: #{used} (#{capacity})" if capacity
+          puts "  Available: #{avail}"
+        end
+
+        def extract_volume_name(path)
+          # Extract volume name from /Volumes/VolumeName/...
+          match = path.match(%r{^/Volumes/([^/]+)})
+          match[1] if match
+        end
+
+        def truncate_path(path, max_length)
+          return path if path.nil? || path.length <= max_length
+
+          # Keep the end of the path (more useful)
+          "...#{path[-(max_length - 3)..]}"
+        end
+      end
+    end
+  end
+end
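A minimal sketch of driving the new class directly from Ruby; the `dam` CLI wiring that exposes it (data/bin/dam) is not shown in this diff, and the brand key below is only an example:

```ruby
require 'appydave/tools'

status = Appydave::Tools::Dam::SsdStatus.new
status.show_all          # per-volume MOUNTED / NOT MOUNTED lines plus the brand table
status.show('appydave')  # detailed view for one brand key (example key)

# extract_volume_name is private; it pulls the volume out of a backup path, e.g.
# "/Volumes/T7/youtube-PUBLISHED/appydave" => "T7", so mount checks can target /Volumes/T7.
```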
data/lib/appydave/tools/dam/status.rb
@@ -42,7 +42,7 @@ module Appydave
        def show_project_status
          project_size = calculate_project_size
          last_modified = File.mtime(project_path)
-         age = format_age(last_modified)
+         age = FileHelper.format_age(last_modified)
 
          puts "📊 Status: v-#{brand}/#{File.basename(project_path)} (#{format_size(project_size)})"
          puts "  Last modified: #{age} ago"
@@ -155,7 +155,7 @@ module Appydave
 
          if Dir.exist?(ssd_full_path)
            last_modified = File.mtime(ssd_full_path)
-           age = format_age(last_modified)
+           age = FileHelper.format_age(last_modified)
            puts "  Last synced: #{age} ago"
          end
        end
@@ -163,29 +163,6 @@ module Appydave
          puts ''
        end
 
-       def show_git_status
-         puts 'Git:'
-
-         status = git_status_info
-
-         puts "  🌿 Branch: #{status[:branch]}"
-         puts "  📡 Remote: #{status[:remote]}" if status[:remote]
-
-         if status[:modified_count].positive? || status[:untracked_count].positive?
-           puts "  ↕️ Status: #{status[:modified_count]} modified, #{status[:untracked_count]} untracked"
-         else
-           puts '  ↕️ Status: Clean working directory'
-         end
-
-         if status[:ahead].positive? || status[:behind].positive?
-           puts "  🔄 Sync: #{sync_status_text(status[:ahead], status[:behind])}"
-         else
-           puts '  🔄 Sync: Up to date'
-         end
-
-         puts ''
-       end
-
        def show_brand_git_status
          status = git_status_info
 
@@ -340,31 +317,6 @@ module Appydave
          FileHelper.format_size(bytes)
        end
 
-       def format_age(time)
-         return 'N/A' if time.nil?
-
-         seconds = Time.now - time
-         return 'just now' if seconds < 60
-
-         minutes = seconds / 60
-         return "#{minutes.round}m" if minutes < 60
-
-         hours = minutes / 60
-         return "#{hours.round}h" if hours < 24
-
-         days = hours / 24
-         return "#{days.round}d" if days < 7
-
-         weeks = days / 7
-         return "#{weeks.round}w" if weeks < 4
-
-         months = days / 30
-         return "#{months.round}mo" if months < 12
-
-         years = days / 365
-         "#{years.round}y"
-       end
-
        def calculate_manifest_age(last_updated_str)
          last_updated = Time.parse(last_updated_str)
          Time.now - last_updated
@@ -468,7 +420,7 @@ module Appydave
 
          return unless latest_sync
 
-         age = format_age(latest_sync)
+         age = FileHelper.format_age(latest_sync)
          puts "  Last S3 sync: #{age} ago"
        end
      end