appydave-tools 0.77.4 → 0.77.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +15 -0
- data/lib/appydave/tools/dam/s3_archiver.rb +258 -0
- data/lib/appydave/tools/dam/s3_operations.rb +8 -424
- data/lib/appydave/tools/dam/s3_status_checker.rb +178 -0
- data/lib/appydave/tools/version.rb +1 -1
- data/lib/appydave/tools.rb +2 -0
- data/package.json +1 -1
- metadata +3 -1
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: 1ca0547c8abec9adcf7081ade94ffe5753c477e21288020bd0b43dd7609747bb
|
|
4
|
+
data.tar.gz: aa03c3205607e0394eaea6b75113a417b0edcf5fcafdba797de9f1372d34e4e9
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: 4ca27e6b5d96647c8c8eeed68c1d3ec502ac436052e8b5b12975b5beca581297d405895dc4c1585d2ca38db29f5f6f27b16d86ed76c38959668d3497ae4f7f7c
|
|
7
|
+
data.tar.gz: 837b4aab157a1e14845e7d572513d94ae8e4aef37784bc84f88a05894c329ce5c088aa5ade430f0dfb2a54984fe0efa9b1e84af5a8f170392bf980edcfd14c39
|
data/CHANGELOG.md
CHANGED
|
@@ -1,3 +1,18 @@
|
|
|
1
|
+
## [0.77.5](https://github.com/appydave/appydave-tools/compare/v0.77.4...v0.77.5) (2026-03-20)
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
### Bug Fixes
|
|
5
|
+
|
|
6
|
+
* extract S3StatusChecker from S3Operations; status/calculate_sync_status/sync_timestamps delegate to focused class ([6766897](https://github.com/appydave/appydave-tools/commit/6766897421e37c5b3695db7a3e11f3696a1b2f09))
|
|
7
|
+
* remove redundant rubocop disable directives from S3StatusChecker (CI rubocop 1.85.1) ([9f15b34](https://github.com/appydave/appydave-tools/commit/9f15b34c7ab3e139d208d1a5692fc15a42bcfe1a))
|
|
8
|
+
|
|
9
|
+
## [0.77.4](https://github.com/appydave/appydave-tools/compare/v0.77.3...v0.77.4) (2026-03-20)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
### Bug Fixes
|
|
13
|
+
|
|
14
|
+
* extract S3Downloader from S3Operations; download delegates to focused class ([4d97cdf](https://github.com/appydave/appydave-tools/commit/4d97cdf18ea56ca9b658c8858efd9041dcb7ca5b))
|
|
15
|
+
|
|
1
16
|
## [0.77.3](https://github.com/appydave/appydave-tools/compare/v0.77.2...v0.77.3) (2026-03-20)
|
|
2
17
|
|
|
3
18
|
|
|
@@ -0,0 +1,258 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module Appydave
|
|
4
|
+
module Tools
|
|
5
|
+
module Dam
|
|
6
|
+
# Handles S3 cleanup and SSD archive operations.
|
|
7
|
+
# Inherits shared infrastructure and helpers from S3Base.
|
|
8
|
+
class S3Archiver < S3Base
|
|
9
|
+
# Cleanup S3 files
|
|
10
|
+
def cleanup(force: false, dry_run: false)
|
|
11
|
+
s3_files = list_s3_files
|
|
12
|
+
|
|
13
|
+
if s3_files.empty?
|
|
14
|
+
puts "❌ No files found in S3 for #{brand}/#{project_id}"
|
|
15
|
+
return
|
|
16
|
+
end
|
|
17
|
+
|
|
18
|
+
puts "🗑️ Found #{s3_files.size} file(s) in S3 for #{brand}/#{project_id}"
|
|
19
|
+
puts ''
|
|
20
|
+
|
|
21
|
+
unless force
|
|
22
|
+
puts '⚠️ This will DELETE all files from S3 for this project.'
|
|
23
|
+
puts 'Use --force to confirm deletion.'
|
|
24
|
+
return
|
|
25
|
+
end
|
|
26
|
+
|
|
27
|
+
deleted = 0
|
|
28
|
+
failed = 0
|
|
29
|
+
|
|
30
|
+
s3_files.each do |s3_file|
|
|
31
|
+
key = s3_file['Key']
|
|
32
|
+
relative_path = extract_relative_path(key)
|
|
33
|
+
|
|
34
|
+
if delete_s3_file(key, dry_run: dry_run)
|
|
35
|
+
puts " ✓ Deleted: #{relative_path}"
|
|
36
|
+
deleted += 1
|
|
37
|
+
else
|
|
38
|
+
puts " ✗ Failed: #{relative_path}"
|
|
39
|
+
failed += 1
|
|
40
|
+
end
|
|
41
|
+
end
|
|
42
|
+
|
|
43
|
+
puts ''
|
|
44
|
+
puts '✅ Cleanup complete!'
|
|
45
|
+
puts " Deleted: #{deleted}, Failed: #{failed}"
|
|
46
|
+
end
|
|
47
|
+
|
|
48
|
+
# Cleanup local s3-staging files
|
|
49
|
+
def cleanup_local(force: false, dry_run: false)
|
|
50
|
+
project_dir = project_directory_path
|
|
51
|
+
staging_dir = File.join(project_dir, 's3-staging')
|
|
52
|
+
|
|
53
|
+
unless Dir.exist?(staging_dir)
|
|
54
|
+
puts "❌ No s3-staging directory found: #{staging_dir}"
|
|
55
|
+
return
|
|
56
|
+
end
|
|
57
|
+
|
|
58
|
+
files = Dir.glob("#{staging_dir}/**/*").select { |f| File.file?(f) }
|
|
59
|
+
|
|
60
|
+
if files.empty?
|
|
61
|
+
puts '❌ No files found in s3-staging/'
|
|
62
|
+
return
|
|
63
|
+
end
|
|
64
|
+
|
|
65
|
+
puts "🗑️ Found #{files.size} file(s) in local s3-staging/"
|
|
66
|
+
puts ''
|
|
67
|
+
|
|
68
|
+
unless force
|
|
69
|
+
puts '⚠️ This will DELETE all files from s3-staging/ for this project.'
|
|
70
|
+
puts 'Use --force to confirm deletion.'
|
|
71
|
+
return
|
|
72
|
+
end
|
|
73
|
+
|
|
74
|
+
deleted = 0
|
|
75
|
+
failed = 0
|
|
76
|
+
|
|
77
|
+
files.each do |file|
|
|
78
|
+
relative_path = file.sub("#{staging_dir}/", '')
|
|
79
|
+
|
|
80
|
+
if delete_local_file(file, dry_run: dry_run)
|
|
81
|
+
puts " ✓ Deleted: #{relative_path}"
|
|
82
|
+
deleted += 1
|
|
83
|
+
else
|
|
84
|
+
puts " ✗ Failed: #{relative_path}"
|
|
85
|
+
failed += 1
|
|
86
|
+
end
|
|
87
|
+
end
|
|
88
|
+
|
|
89
|
+
Dir.glob("#{staging_dir}/**/*").select { |d| File.directory?(d) }.sort.reverse.each do |dir|
|
|
90
|
+
Dir.rmdir(dir) if Dir.empty?(dir)
|
|
91
|
+
rescue StandardError
|
|
92
|
+
nil
|
|
93
|
+
end
|
|
94
|
+
|
|
95
|
+
puts ''
|
|
96
|
+
puts '✅ Local cleanup complete!'
|
|
97
|
+
puts " Deleted: #{deleted}, Failed: #{failed}"
|
|
98
|
+
end
|
|
99
|
+
|
|
100
|
+
# Archive project to SSD
|
|
101
|
+
def archive(force: false, dry_run: false)
|
|
102
|
+
ssd_backup = brand_info.locations.ssd_backup
|
|
103
|
+
|
|
104
|
+
unless ssd_backup && !ssd_backup.empty?
|
|
105
|
+
puts "❌ SSD backup location not configured for brand '#{brand}'"
|
|
106
|
+
return
|
|
107
|
+
end
|
|
108
|
+
|
|
109
|
+
unless Dir.exist?(ssd_backup)
|
|
110
|
+
puts "❌ SSD not mounted at #{ssd_backup}"
|
|
111
|
+
puts ' Please connect the SSD before archiving.'
|
|
112
|
+
return
|
|
113
|
+
end
|
|
114
|
+
|
|
115
|
+
project_dir = project_directory_path
|
|
116
|
+
|
|
117
|
+
unless Dir.exist?(project_dir)
|
|
118
|
+
puts "❌ Project not found: #{project_dir}"
|
|
119
|
+
puts ''
|
|
120
|
+
puts " Try: dam list #{brand} # See available projects"
|
|
121
|
+
return
|
|
122
|
+
end
|
|
123
|
+
|
|
124
|
+
ssd_project_dir = File.join(ssd_backup, project_id)
|
|
125
|
+
|
|
126
|
+
puts "📦 Archive: #{brand}/#{project_id}"
|
|
127
|
+
puts ''
|
|
128
|
+
|
|
129
|
+
if copy_to_ssd(project_dir, ssd_project_dir, dry_run: dry_run)
|
|
130
|
+
if force
|
|
131
|
+
delete_local_project(project_dir, dry_run: dry_run)
|
|
132
|
+
else
|
|
133
|
+
puts ''
|
|
134
|
+
puts '⚠️ Project copied to SSD but NOT deleted locally.'
|
|
135
|
+
puts ' Use --force to delete local copy after archiving.'
|
|
136
|
+
end
|
|
137
|
+
end
|
|
138
|
+
|
|
139
|
+
puts ''
|
|
140
|
+
puts dry_run ? '✅ Archive dry-run complete!' : '✅ Archive complete!'
|
|
141
|
+
end
|
|
142
|
+
|
|
143
|
+
private
|
|
144
|
+
|
|
145
|
+
def delete_s3_file(s3_key, dry_run: false)
|
|
146
|
+
if dry_run
|
|
147
|
+
puts " [DRY-RUN] Would delete: s3://#{brand_info.aws.s3_bucket}/#{s3_key}"
|
|
148
|
+
return true
|
|
149
|
+
end
|
|
150
|
+
|
|
151
|
+
s3_client.delete_object(
|
|
152
|
+
bucket: brand_info.aws.s3_bucket,
|
|
153
|
+
key: s3_key
|
|
154
|
+
)
|
|
155
|
+
|
|
156
|
+
true
|
|
157
|
+
rescue Aws::S3::Errors::ServiceError => e
|
|
158
|
+
puts " Error: #{e.message}"
|
|
159
|
+
false
|
|
160
|
+
end
|
|
161
|
+
|
|
162
|
+
def delete_local_file(file_path, dry_run: false)
|
|
163
|
+
if dry_run
|
|
164
|
+
puts " [DRY-RUN] Would delete: #{file_path}"
|
|
165
|
+
return true
|
|
166
|
+
end
|
|
167
|
+
|
|
168
|
+
File.delete(file_path)
|
|
169
|
+
true
|
|
170
|
+
rescue StandardError => e
|
|
171
|
+
puts " Error: #{e.message}"
|
|
172
|
+
false
|
|
173
|
+
end
|
|
174
|
+
|
|
175
|
+
def copy_to_ssd(source_dir, dest_dir, dry_run: false)
|
|
176
|
+
if Dir.exist?(dest_dir)
|
|
177
|
+
puts '⚠️ Already exists on SSD'
|
|
178
|
+
puts " Path: #{dest_dir}"
|
|
179
|
+
puts ' Skipping copy step'
|
|
180
|
+
return true
|
|
181
|
+
end
|
|
182
|
+
|
|
183
|
+
size = calculate_directory_size(source_dir)
|
|
184
|
+
puts '📋 Copy to SSD (excluding generated files):'
|
|
185
|
+
puts " From: #{source_dir}"
|
|
186
|
+
puts " To: #{dest_dir}"
|
|
187
|
+
puts " Size: #{file_size_human(size)}"
|
|
188
|
+
puts ''
|
|
189
|
+
|
|
190
|
+
if dry_run
|
|
191
|
+
puts ' [DRY-RUN] Would copy project to SSD (excluding node_modules, .git, etc.)'
|
|
192
|
+
return true
|
|
193
|
+
end
|
|
194
|
+
|
|
195
|
+
FileUtils.mkdir_p(dest_dir)
|
|
196
|
+
stats = copy_with_exclusions(source_dir, dest_dir)
|
|
197
|
+
puts " ✅ Copied to SSD (#{stats[:files]} files, excluded #{stats[:excluded]} generated files)"
|
|
198
|
+
|
|
199
|
+
true
|
|
200
|
+
rescue StandardError => e
|
|
201
|
+
puts " ✗ Failed to copy: #{e.message}"
|
|
202
|
+
false
|
|
203
|
+
end
|
|
204
|
+
|
|
205
|
+
def copy_with_exclusions(source_dir, dest_dir)
|
|
206
|
+
stats = { files: 0, excluded: 0 }
|
|
207
|
+
|
|
208
|
+
Dir.glob(File.join(source_dir, '**', '*'), File::FNM_DOTMATCH).each do |source_path|
|
|
209
|
+
next if File.directory?(source_path)
|
|
210
|
+
next if ['.', '..'].include?(File.basename(source_path))
|
|
211
|
+
|
|
212
|
+
relative_path = source_path.sub("#{source_dir}/", '')
|
|
213
|
+
|
|
214
|
+
if excluded_path?(relative_path)
|
|
215
|
+
stats[:excluded] += 1
|
|
216
|
+
next
|
|
217
|
+
end
|
|
218
|
+
|
|
219
|
+
dest_path = File.join(dest_dir, relative_path)
|
|
220
|
+
FileUtils.mkdir_p(File.dirname(dest_path))
|
|
221
|
+
FileUtils.cp(source_path, dest_path, preserve: true)
|
|
222
|
+
stats[:files] += 1
|
|
223
|
+
end
|
|
224
|
+
|
|
225
|
+
stats
|
|
226
|
+
end
|
|
227
|
+
|
|
228
|
+
def delete_local_project(project_dir, dry_run: false)
|
|
229
|
+
size = calculate_directory_size(project_dir)
|
|
230
|
+
|
|
231
|
+
puts ''
|
|
232
|
+
puts '🗑️ Delete local project:'
|
|
233
|
+
puts " Path: #{project_dir}"
|
|
234
|
+
puts " Size: #{file_size_human(size)}"
|
|
235
|
+
puts ''
|
|
236
|
+
|
|
237
|
+
if dry_run
|
|
238
|
+
puts ' [DRY-RUN] Would delete entire local folder'
|
|
239
|
+
return true
|
|
240
|
+
end
|
|
241
|
+
|
|
242
|
+
FileUtils.rm_rf(project_dir)
|
|
243
|
+
puts ' ✅ Deleted local folder'
|
|
244
|
+
puts " 💾 Freed: #{file_size_human(size)}"
|
|
245
|
+
|
|
246
|
+
true
|
|
247
|
+
rescue StandardError => e
|
|
248
|
+
puts " ✗ Failed to delete: #{e.message}"
|
|
249
|
+
false
|
|
250
|
+
end
|
|
251
|
+
|
|
252
|
+
def calculate_directory_size(dir_path)
|
|
253
|
+
FileHelper.calculate_directory_size(dir_path)
|
|
254
|
+
end
|
|
255
|
+
end
|
|
256
|
+
end
|
|
257
|
+
end
|
|
258
|
+
end
|
|
@@ -3,9 +3,9 @@
|
|
|
3
3
|
module Appydave
|
|
4
4
|
module Tools
|
|
5
5
|
module Dam
|
|
6
|
-
#
|
|
6
|
+
# Thin delegation facade for S3 operations.
|
|
7
|
+
# Each method delegates to a focused class that handles one concern.
|
|
7
8
|
# Inherits shared infrastructure and helpers from S3Base.
|
|
8
|
-
# Will become a thin delegation facade as focused classes are extracted (B020).
|
|
9
9
|
class S3Operations < S3Base
|
|
10
10
|
# Upload files from s3-staging/ to S3
|
|
11
11
|
def upload(dry_run: false)
|
|
@@ -19,330 +19,34 @@ module Appydave
|
|
|
19
19
|
|
|
20
20
|
# Show sync status
|
|
21
21
|
def status
|
|
22
|
-
|
|
23
|
-
staging_dir = File.join(project_dir, 's3-staging')
|
|
24
|
-
|
|
25
|
-
# Check if project directory exists
|
|
26
|
-
unless Dir.exist?(project_dir)
|
|
27
|
-
puts "❌ Project not found: #{brand}/#{project_id}"
|
|
28
|
-
puts ''
|
|
29
|
-
puts ' This project does not exist locally.'
|
|
30
|
-
puts ' Possible causes:'
|
|
31
|
-
puts ' - Project name might be misspelled'
|
|
32
|
-
puts ' - Project may not exist in this brand'
|
|
33
|
-
puts ''
|
|
34
|
-
puts " Try: dam list #{brand} # See all projects for this brand"
|
|
35
|
-
return
|
|
36
|
-
end
|
|
37
|
-
|
|
38
|
-
s3_files = list_s3_files
|
|
39
|
-
local_files = list_local_files(staging_dir)
|
|
40
|
-
|
|
41
|
-
# Build a map of S3 files for quick lookup
|
|
42
|
-
s3_files_map = s3_files.each_with_object({}) do |file, hash|
|
|
43
|
-
relative_path = extract_relative_path(file['Key'])
|
|
44
|
-
hash[relative_path] = file
|
|
45
|
-
end
|
|
46
|
-
|
|
47
|
-
if s3_files.empty? && local_files.empty?
|
|
48
|
-
puts "ℹ️ No files in S3 or s3-staging/ for #{brand}/#{project_id}"
|
|
49
|
-
puts ''
|
|
50
|
-
puts ' This project exists but has no heavy files ready for S3 sync.'
|
|
51
|
-
puts ''
|
|
52
|
-
puts ' Next steps:'
|
|
53
|
-
puts " 1. Add video files to: #{staging_dir}/"
|
|
54
|
-
puts " 2. Upload to S3: dam s3-up #{brand} #{project_id}"
|
|
55
|
-
return
|
|
56
|
-
end
|
|
57
|
-
|
|
58
|
-
puts "📊 S3 Sync Status for #{brand}/#{project_id}"
|
|
59
|
-
|
|
60
|
-
# Show last sync time
|
|
61
|
-
if s3_files.any?
|
|
62
|
-
most_recent = s3_files.map { |f| f['LastModified'] }.compact.max
|
|
63
|
-
if most_recent
|
|
64
|
-
time_ago = format_time_ago(Time.now - most_recent)
|
|
65
|
-
puts " Last synced: #{time_ago} ago (#{most_recent.strftime('%Y-%m-%d %H:%M')})"
|
|
66
|
-
end
|
|
67
|
-
end
|
|
68
|
-
puts ''
|
|
69
|
-
|
|
70
|
-
# Combine all file paths (S3 + local)
|
|
71
|
-
all_paths = (s3_files_map.keys + local_files.keys).uniq.sort
|
|
72
|
-
|
|
73
|
-
total_s3_size = 0
|
|
74
|
-
total_local_size = 0
|
|
75
|
-
|
|
76
|
-
all_paths.each do |relative_path|
|
|
77
|
-
s3_file = s3_files_map[relative_path]
|
|
78
|
-
local_file = File.join(staging_dir, relative_path)
|
|
79
|
-
|
|
80
|
-
if s3_file && File.exist?(local_file)
|
|
81
|
-
# File exists in both S3 and local
|
|
82
|
-
s3_size = s3_file['Size']
|
|
83
|
-
local_size = File.size(local_file)
|
|
84
|
-
total_s3_size += s3_size
|
|
85
|
-
total_local_size += local_size
|
|
86
|
-
|
|
87
|
-
s3_etag = s3_file['ETag'].gsub('"', '')
|
|
88
|
-
match_status = compare_files(local_file: local_file, s3_etag: s3_etag, s3_size: s3_size)
|
|
89
|
-
|
|
90
|
-
if match_status == :synced
|
|
91
|
-
status_label = multipart_etag?(s3_etag) ? 'synced*' : 'synced'
|
|
92
|
-
puts " ✓ #{relative_path} (#{file_size_human(s3_size)}) [#{status_label}]"
|
|
93
|
-
else
|
|
94
|
-
puts " ⚠️ #{relative_path} (#{file_size_human(s3_size)}) [modified]"
|
|
95
|
-
end
|
|
96
|
-
elsif s3_file
|
|
97
|
-
# File only in S3
|
|
98
|
-
s3_size = s3_file['Size']
|
|
99
|
-
total_s3_size += s3_size
|
|
100
|
-
puts " ☁️ #{relative_path} (#{file_size_human(s3_size)}) [S3 only]"
|
|
101
|
-
else
|
|
102
|
-
# File only local
|
|
103
|
-
local_size = File.size(local_file)
|
|
104
|
-
total_local_size += local_size
|
|
105
|
-
puts " 📁 #{relative_path} (#{file_size_human(local_size)}) [local only]"
|
|
106
|
-
end
|
|
107
|
-
end
|
|
108
|
-
|
|
109
|
-
puts ''
|
|
110
|
-
puts "S3 files: #{s3_files.size}, Local files: #{local_files.size}"
|
|
111
|
-
puts "S3 size: #{file_size_human(total_s3_size)}, Local size: #{file_size_human(total_local_size)}"
|
|
22
|
+
S3StatusChecker.new(brand, project_id, **delegated_opts).status
|
|
112
23
|
end
|
|
113
24
|
|
|
114
25
|
# Cleanup S3 files
|
|
115
26
|
def cleanup(force: false, dry_run: false)
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
if s3_files.empty?
|
|
119
|
-
puts "❌ No files found in S3 for #{brand}/#{project_id}"
|
|
120
|
-
return
|
|
121
|
-
end
|
|
122
|
-
|
|
123
|
-
puts "🗑️ Found #{s3_files.size} file(s) in S3 for #{brand}/#{project_id}"
|
|
124
|
-
puts ''
|
|
125
|
-
|
|
126
|
-
unless force
|
|
127
|
-
puts '⚠️ This will DELETE all files from S3 for this project.'
|
|
128
|
-
puts 'Use --force to confirm deletion.'
|
|
129
|
-
return
|
|
130
|
-
end
|
|
131
|
-
|
|
132
|
-
deleted = 0
|
|
133
|
-
failed = 0
|
|
134
|
-
|
|
135
|
-
s3_files.each do |s3_file|
|
|
136
|
-
key = s3_file['Key']
|
|
137
|
-
relative_path = extract_relative_path(key)
|
|
138
|
-
|
|
139
|
-
if delete_s3_file(key, dry_run: dry_run)
|
|
140
|
-
puts " ✓ Deleted: #{relative_path}"
|
|
141
|
-
deleted += 1
|
|
142
|
-
else
|
|
143
|
-
puts " ✗ Failed: #{relative_path}"
|
|
144
|
-
failed += 1
|
|
145
|
-
end
|
|
146
|
-
end
|
|
147
|
-
|
|
148
|
-
puts ''
|
|
149
|
-
puts '✅ Cleanup complete!'
|
|
150
|
-
puts " Deleted: #{deleted}, Failed: #{failed}"
|
|
27
|
+
S3Archiver.new(brand, project_id, **delegated_opts).cleanup(force: force, dry_run: dry_run)
|
|
151
28
|
end
|
|
152
29
|
|
|
153
30
|
# Cleanup local s3-staging files
|
|
154
31
|
def cleanup_local(force: false, dry_run: false)
|
|
155
|
-
|
|
156
|
-
staging_dir = File.join(project_dir, 's3-staging')
|
|
157
|
-
|
|
158
|
-
unless Dir.exist?(staging_dir)
|
|
159
|
-
puts "❌ No s3-staging directory found: #{staging_dir}"
|
|
160
|
-
return
|
|
161
|
-
end
|
|
162
|
-
|
|
163
|
-
files = Dir.glob("#{staging_dir}/**/*").select { |f| File.file?(f) }
|
|
164
|
-
|
|
165
|
-
if files.empty?
|
|
166
|
-
puts '❌ No files found in s3-staging/'
|
|
167
|
-
return
|
|
168
|
-
end
|
|
169
|
-
|
|
170
|
-
puts "🗑️ Found #{files.size} file(s) in local s3-staging/"
|
|
171
|
-
puts ''
|
|
172
|
-
|
|
173
|
-
unless force
|
|
174
|
-
puts '⚠️ This will DELETE all files from s3-staging/ for this project.'
|
|
175
|
-
puts 'Use --force to confirm deletion.'
|
|
176
|
-
return
|
|
177
|
-
end
|
|
178
|
-
|
|
179
|
-
deleted = 0
|
|
180
|
-
failed = 0
|
|
181
|
-
|
|
182
|
-
files.each do |file|
|
|
183
|
-
relative_path = file.sub("#{staging_dir}/", '')
|
|
184
|
-
|
|
185
|
-
if delete_local_file(file, dry_run: dry_run)
|
|
186
|
-
puts " ✓ Deleted: #{relative_path}"
|
|
187
|
-
deleted += 1
|
|
188
|
-
else
|
|
189
|
-
puts " ✗ Failed: #{relative_path}"
|
|
190
|
-
failed += 1
|
|
191
|
-
end
|
|
192
|
-
end
|
|
193
|
-
|
|
194
|
-
# Remove empty directories
|
|
195
|
-
Dir.glob("#{staging_dir}/**/*").select { |d| File.directory?(d) }.sort.reverse.each do |dir|
|
|
196
|
-
Dir.rmdir(dir) if Dir.empty?(dir)
|
|
197
|
-
rescue StandardError
|
|
198
|
-
nil
|
|
199
|
-
end
|
|
200
|
-
|
|
201
|
-
puts ''
|
|
202
|
-
puts '✅ Local cleanup complete!'
|
|
203
|
-
puts " Deleted: #{deleted}, Failed: #{failed}"
|
|
32
|
+
S3Archiver.new(brand, project_id, **delegated_opts).cleanup_local(force: force, dry_run: dry_run)
|
|
204
33
|
end
|
|
205
34
|
|
|
206
35
|
# Archive project to SSD
|
|
207
36
|
def archive(force: false, dry_run: false)
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
unless ssd_backup && !ssd_backup.empty?
|
|
211
|
-
puts "❌ SSD backup location not configured for brand '#{brand}'"
|
|
212
|
-
return
|
|
213
|
-
end
|
|
214
|
-
|
|
215
|
-
unless Dir.exist?(ssd_backup)
|
|
216
|
-
puts "❌ SSD not mounted at #{ssd_backup}"
|
|
217
|
-
puts ' Please connect the SSD before archiving.'
|
|
218
|
-
return
|
|
219
|
-
end
|
|
220
|
-
|
|
221
|
-
project_dir = project_directory_path
|
|
222
|
-
|
|
223
|
-
unless Dir.exist?(project_dir)
|
|
224
|
-
puts "❌ Project not found: #{project_dir}"
|
|
225
|
-
puts ''
|
|
226
|
-
puts " Try: dam list #{brand} # See available projects"
|
|
227
|
-
return
|
|
228
|
-
end
|
|
229
|
-
|
|
230
|
-
# Determine SSD destination path
|
|
231
|
-
ssd_project_dir = File.join(ssd_backup, project_id)
|
|
232
|
-
|
|
233
|
-
puts "📦 Archive: #{brand}/#{project_id}"
|
|
234
|
-
puts ''
|
|
235
|
-
|
|
236
|
-
# Step 1: Copy to SSD
|
|
237
|
-
if copy_to_ssd(project_dir, ssd_project_dir, dry_run: dry_run)
|
|
238
|
-
# Step 2: Delete local project (if force is true)
|
|
239
|
-
if force
|
|
240
|
-
delete_local_project(project_dir, dry_run: dry_run)
|
|
241
|
-
else
|
|
242
|
-
puts ''
|
|
243
|
-
puts '⚠️ Project copied to SSD but NOT deleted locally.'
|
|
244
|
-
puts ' Use --force to delete local copy after archiving.'
|
|
245
|
-
end
|
|
246
|
-
end
|
|
247
|
-
|
|
248
|
-
puts ''
|
|
249
|
-
puts dry_run ? '✅ Archive dry-run complete!' : '✅ Archive complete!'
|
|
37
|
+
S3Archiver.new(brand, project_id, **delegated_opts).archive(force: force, dry_run: dry_run)
|
|
250
38
|
end
|
|
251
39
|
|
|
252
40
|
# Calculate 3-state S3 sync status
|
|
253
41
|
# @return [String] One of: '↑ upload', '↓ download', '✓ synced', 'none'
|
|
254
42
|
def calculate_sync_status
|
|
255
|
-
|
|
256
|
-
staging_dir = File.join(project_dir, 's3-staging')
|
|
257
|
-
|
|
258
|
-
# No s3-staging directory means no S3 intent
|
|
259
|
-
return 'none' unless Dir.exist?(staging_dir)
|
|
260
|
-
|
|
261
|
-
# Get S3 files (if S3 configured)
|
|
262
|
-
begin
|
|
263
|
-
s3_files = list_s3_files
|
|
264
|
-
rescue StandardError
|
|
265
|
-
# S3 not configured or not accessible
|
|
266
|
-
return 'none'
|
|
267
|
-
end
|
|
268
|
-
|
|
269
|
-
local_files = list_local_files(staging_dir)
|
|
270
|
-
|
|
271
|
-
# No files anywhere
|
|
272
|
-
return 'none' if s3_files.empty? && local_files.empty?
|
|
273
|
-
|
|
274
|
-
# Build S3 files map
|
|
275
|
-
s3_files_map = s3_files.each_with_object({}) do |file, hash|
|
|
276
|
-
relative_path = extract_relative_path(file['Key'])
|
|
277
|
-
hash[relative_path] = file
|
|
278
|
-
end
|
|
279
|
-
|
|
280
|
-
# Check for differences
|
|
281
|
-
needs_upload = false
|
|
282
|
-
needs_download = false
|
|
283
|
-
|
|
284
|
-
# Check all local files
|
|
285
|
-
local_files.each_key do |relative_path|
|
|
286
|
-
local_file = File.join(staging_dir, relative_path)
|
|
287
|
-
s3_file = s3_files_map[relative_path]
|
|
288
|
-
|
|
289
|
-
if s3_file
|
|
290
|
-
# Compare using multipart-aware comparison
|
|
291
|
-
s3_etag = s3_file['ETag'].gsub('"', '')
|
|
292
|
-
s3_size = s3_file['Size']
|
|
293
|
-
match_status = compare_files(local_file: local_file, s3_etag: s3_etag, s3_size: s3_size)
|
|
294
|
-
needs_upload = true if match_status != :synced
|
|
295
|
-
else
|
|
296
|
-
# Local file not in S3
|
|
297
|
-
needs_upload = true
|
|
298
|
-
end
|
|
299
|
-
end
|
|
300
|
-
|
|
301
|
-
# Check for S3-only files
|
|
302
|
-
s3_files_map.each_key do |relative_path|
|
|
303
|
-
local_file = File.join(staging_dir, relative_path)
|
|
304
|
-
needs_download = true unless File.exist?(local_file)
|
|
305
|
-
end
|
|
306
|
-
|
|
307
|
-
# Return status based on what's needed
|
|
308
|
-
if needs_upload && needs_download
|
|
309
|
-
'⚠️ both'
|
|
310
|
-
elsif needs_upload
|
|
311
|
-
'↑ upload'
|
|
312
|
-
elsif needs_download
|
|
313
|
-
'↓ download'
|
|
314
|
-
else
|
|
315
|
-
'✓ synced'
|
|
316
|
-
end
|
|
43
|
+
S3StatusChecker.new(brand, project_id, **delegated_opts).calculate_sync_status
|
|
317
44
|
end
|
|
318
45
|
|
|
319
46
|
# Calculate S3 sync timestamps (last upload/download times)
|
|
320
47
|
# @return [Hash] { last_upload: Time|nil, last_download: Time|nil }
|
|
321
48
|
def sync_timestamps
|
|
322
|
-
|
|
323
|
-
staging_dir = File.join(project_dir, 's3-staging')
|
|
324
|
-
|
|
325
|
-
# No s3-staging directory means no S3 intent
|
|
326
|
-
return { last_upload: nil, last_download: nil } unless Dir.exist?(staging_dir)
|
|
327
|
-
|
|
328
|
-
# Get S3 files (if S3 configured)
|
|
329
|
-
begin
|
|
330
|
-
s3_files = list_s3_files
|
|
331
|
-
rescue StandardError
|
|
332
|
-
# S3 not configured or not accessible
|
|
333
|
-
return { last_upload: nil, last_download: nil }
|
|
334
|
-
end
|
|
335
|
-
|
|
336
|
-
# Last upload time = most recent S3 file LastModified
|
|
337
|
-
last_upload = s3_files.map { |f| f['LastModified'] }.compact.max if s3_files.any?
|
|
338
|
-
|
|
339
|
-
# Last download time = most recent local file mtime (in s3-staging)
|
|
340
|
-
last_download = if Dir.exist?(staging_dir)
|
|
341
|
-
local_files = Dir.glob(File.join(staging_dir, '**/*')).select { |f| File.file?(f) }
|
|
342
|
-
local_files.map { |f| File.mtime(f) }.max if local_files.any?
|
|
343
|
-
end
|
|
344
|
-
|
|
345
|
-
{ last_upload: last_upload, last_download: last_download }
|
|
49
|
+
S3StatusChecker.new(brand, project_id, **delegated_opts).sync_timestamps
|
|
346
50
|
end
|
|
347
51
|
|
|
348
52
|
private
|
|
@@ -350,126 +54,6 @@ module Appydave
|
|
|
350
54
|
def delegated_opts
|
|
351
55
|
{ brand_info: brand_info, brand_path: brand_path, s3_client: @s3_client_override }
|
|
352
56
|
end
|
|
353
|
-
|
|
354
|
-
# Delete file from S3
|
|
355
|
-
def delete_s3_file(s3_key, dry_run: false)
|
|
356
|
-
if dry_run
|
|
357
|
-
puts " [DRY-RUN] Would delete: s3://#{brand_info.aws.s3_bucket}/#{s3_key}"
|
|
358
|
-
return true
|
|
359
|
-
end
|
|
360
|
-
|
|
361
|
-
s3_client.delete_object(
|
|
362
|
-
bucket: brand_info.aws.s3_bucket,
|
|
363
|
-
key: s3_key
|
|
364
|
-
)
|
|
365
|
-
|
|
366
|
-
true
|
|
367
|
-
rescue Aws::S3::Errors::ServiceError => e
|
|
368
|
-
puts " Error: #{e.message}"
|
|
369
|
-
false
|
|
370
|
-
end
|
|
371
|
-
|
|
372
|
-
# Delete local file
|
|
373
|
-
def delete_local_file(file_path, dry_run: false)
|
|
374
|
-
if dry_run
|
|
375
|
-
puts " [DRY-RUN] Would delete: #{file_path}"
|
|
376
|
-
return true
|
|
377
|
-
end
|
|
378
|
-
|
|
379
|
-
File.delete(file_path)
|
|
380
|
-
true
|
|
381
|
-
rescue StandardError => e
|
|
382
|
-
puts " Error: #{e.message}"
|
|
383
|
-
false
|
|
384
|
-
end
|
|
385
|
-
|
|
386
|
-
# Copy project to SSD
|
|
387
|
-
def copy_to_ssd(source_dir, dest_dir, dry_run: false)
|
|
388
|
-
if Dir.exist?(dest_dir)
|
|
389
|
-
puts '⚠️ Already exists on SSD'
|
|
390
|
-
puts " Path: #{dest_dir}"
|
|
391
|
-
puts ' Skipping copy step'
|
|
392
|
-
return true
|
|
393
|
-
end
|
|
394
|
-
|
|
395
|
-
size = calculate_directory_size(source_dir)
|
|
396
|
-
puts '📋 Copy to SSD (excluding generated files):'
|
|
397
|
-
puts " From: #{source_dir}"
|
|
398
|
-
puts " To: #{dest_dir}"
|
|
399
|
-
puts " Size: #{file_size_human(size)}"
|
|
400
|
-
puts ''
|
|
401
|
-
|
|
402
|
-
if dry_run
|
|
403
|
-
puts ' [DRY-RUN] Would copy project to SSD (excluding node_modules, .git, etc.)'
|
|
404
|
-
return true
|
|
405
|
-
end
|
|
406
|
-
|
|
407
|
-
FileUtils.mkdir_p(dest_dir)
|
|
408
|
-
|
|
409
|
-
# Copy files with exclusion filtering
|
|
410
|
-
stats = copy_with_exclusions(source_dir, dest_dir)
|
|
411
|
-
|
|
412
|
-
puts " ✅ Copied to SSD (#{stats[:files]} files, excluded #{stats[:excluded]} generated files)"
|
|
413
|
-
|
|
414
|
-
true
|
|
415
|
-
rescue StandardError => e
|
|
416
|
-
puts " ✗ Failed to copy: #{e.message}"
|
|
417
|
-
false
|
|
418
|
-
end
|
|
419
|
-
|
|
420
|
-
# Copy directory contents with exclusion filtering
|
|
421
|
-
def copy_with_exclusions(source_dir, dest_dir)
|
|
422
|
-
stats = { files: 0, excluded: 0 }
|
|
423
|
-
|
|
424
|
-
Dir.glob(File.join(source_dir, '**', '*'), File::FNM_DOTMATCH).each do |source_path|
|
|
425
|
-
next if File.directory?(source_path)
|
|
426
|
-
next if ['.', '..'].include?(File.basename(source_path))
|
|
427
|
-
|
|
428
|
-
relative_path = source_path.sub("#{source_dir}/", '')
|
|
429
|
-
|
|
430
|
-
if excluded_path?(relative_path)
|
|
431
|
-
stats[:excluded] += 1
|
|
432
|
-
next
|
|
433
|
-
end
|
|
434
|
-
|
|
435
|
-
dest_path = File.join(dest_dir, relative_path)
|
|
436
|
-
FileUtils.mkdir_p(File.dirname(dest_path))
|
|
437
|
-
FileUtils.cp(source_path, dest_path, preserve: true)
|
|
438
|
-
stats[:files] += 1
|
|
439
|
-
end
|
|
440
|
-
|
|
441
|
-
stats
|
|
442
|
-
end
|
|
443
|
-
|
|
444
|
-
# Delete local project directory
|
|
445
|
-
def delete_local_project(project_dir, dry_run: false)
|
|
446
|
-
size = calculate_directory_size(project_dir)
|
|
447
|
-
|
|
448
|
-
puts ''
|
|
449
|
-
puts '🗑️ Delete local project:'
|
|
450
|
-
puts " Path: #{project_dir}"
|
|
451
|
-
puts " Size: #{file_size_human(size)}"
|
|
452
|
-
puts ''
|
|
453
|
-
|
|
454
|
-
if dry_run
|
|
455
|
-
puts ' [DRY-RUN] Would delete entire local folder'
|
|
456
|
-
return true
|
|
457
|
-
end
|
|
458
|
-
|
|
459
|
-
FileUtils.rm_rf(project_dir)
|
|
460
|
-
puts ' ✅ Deleted local folder'
|
|
461
|
-
puts " 💾 Freed: #{file_size_human(size)}"
|
|
462
|
-
|
|
463
|
-
true
|
|
464
|
-
rescue StandardError => e
|
|
465
|
-
puts " ✗ Failed to delete: #{e.message}"
|
|
466
|
-
false
|
|
467
|
-
end
|
|
468
|
-
|
|
469
|
-
# Calculate total size of a directory
|
|
470
|
-
def calculate_directory_size(dir_path)
|
|
471
|
-
FileHelper.calculate_directory_size(dir_path)
|
|
472
|
-
end
|
|
473
57
|
end
|
|
474
58
|
end
|
|
475
59
|
end
|
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module Appydave
|
|
4
|
+
module Tools
|
|
5
|
+
module Dam
|
|
6
|
+
# Handles S3 status and sync state operations.
|
|
7
|
+
# Inherits shared infrastructure and helpers from S3Base.
|
|
8
|
+
class S3StatusChecker < S3Base
|
|
9
|
+
# Show sync status
|
|
10
|
+
def status
|
|
11
|
+
project_dir = project_directory_path
|
|
12
|
+
staging_dir = File.join(project_dir, 's3-staging')
|
|
13
|
+
|
|
14
|
+
unless Dir.exist?(project_dir)
|
|
15
|
+
puts "❌ Project not found: #{brand}/#{project_id}"
|
|
16
|
+
puts ''
|
|
17
|
+
puts ' This project does not exist locally.'
|
|
18
|
+
puts ' Possible causes:'
|
|
19
|
+
puts ' - Project name might be misspelled'
|
|
20
|
+
puts ' - Project may not exist in this brand'
|
|
21
|
+
puts ''
|
|
22
|
+
puts " Try: dam list #{brand} # See all projects for this brand"
|
|
23
|
+
return
|
|
24
|
+
end
|
|
25
|
+
|
|
26
|
+
s3_files = list_s3_files
|
|
27
|
+
local_files = list_local_files(staging_dir)
|
|
28
|
+
|
|
29
|
+
s3_files_map = s3_files.each_with_object({}) do |file, hash|
|
|
30
|
+
relative_path = extract_relative_path(file['Key'])
|
|
31
|
+
hash[relative_path] = file
|
|
32
|
+
end
|
|
33
|
+
|
|
34
|
+
if s3_files.empty? && local_files.empty?
|
|
35
|
+
puts "ℹ️ No files in S3 or s3-staging/ for #{brand}/#{project_id}"
|
|
36
|
+
puts ''
|
|
37
|
+
puts ' This project exists but has no heavy files ready for S3 sync.'
|
|
38
|
+
puts ''
|
|
39
|
+
puts ' Next steps:'
|
|
40
|
+
puts " 1. Add video files to: #{staging_dir}/"
|
|
41
|
+
puts " 2. Upload to S3: dam s3-up #{brand} #{project_id}"
|
|
42
|
+
return
|
|
43
|
+
end
|
|
44
|
+
|
|
45
|
+
puts "📊 S3 Sync Status for #{brand}/#{project_id}"
|
|
46
|
+
|
|
47
|
+
if s3_files.any?
|
|
48
|
+
most_recent = s3_files.map { |f| f['LastModified'] }.compact.max
|
|
49
|
+
if most_recent
|
|
50
|
+
time_ago = format_time_ago(Time.now - most_recent)
|
|
51
|
+
puts " Last synced: #{time_ago} ago (#{most_recent.strftime('%Y-%m-%d %H:%M')})"
|
|
52
|
+
end
|
|
53
|
+
end
|
|
54
|
+
puts ''
|
|
55
|
+
|
|
56
|
+
all_paths = (s3_files_map.keys + local_files.keys).uniq.sort
|
|
57
|
+
|
|
58
|
+
total_s3_size = 0
|
|
59
|
+
total_local_size = 0
|
|
60
|
+
|
|
61
|
+
all_paths.each do |relative_path|
|
|
62
|
+
s3_file = s3_files_map[relative_path]
|
|
63
|
+
local_file = File.join(staging_dir, relative_path)
|
|
64
|
+
|
|
65
|
+
if s3_file && File.exist?(local_file)
|
|
66
|
+
s3_size = s3_file['Size']
|
|
67
|
+
local_size = File.size(local_file)
|
|
68
|
+
total_s3_size += s3_size
|
|
69
|
+
total_local_size += local_size
|
|
70
|
+
|
|
71
|
+
s3_etag = s3_file['ETag'].gsub('"', '')
|
|
72
|
+
match_status = compare_files(local_file: local_file, s3_etag: s3_etag, s3_size: s3_size)
|
|
73
|
+
|
|
74
|
+
if match_status == :synced
|
|
75
|
+
status_label = multipart_etag?(s3_etag) ? 'synced*' : 'synced'
|
|
76
|
+
puts " ✓ #{relative_path} (#{file_size_human(s3_size)}) [#{status_label}]"
|
|
77
|
+
else
|
|
78
|
+
puts " ⚠️ #{relative_path} (#{file_size_human(s3_size)}) [modified]"
|
|
79
|
+
end
|
|
80
|
+
elsif s3_file
|
|
81
|
+
s3_size = s3_file['Size']
|
|
82
|
+
total_s3_size += s3_size
|
|
83
|
+
puts " ☁️ #{relative_path} (#{file_size_human(s3_size)}) [S3 only]"
|
|
84
|
+
else
|
|
85
|
+
local_size = File.size(local_file)
|
|
86
|
+
total_local_size += local_size
|
|
87
|
+
puts " 📁 #{relative_path} (#{file_size_human(local_size)}) [local only]"
|
|
88
|
+
end
|
|
89
|
+
end
|
|
90
|
+
|
|
91
|
+
puts ''
|
|
92
|
+
puts "S3 files: #{s3_files.size}, Local files: #{local_files.size}"
|
|
93
|
+
puts "S3 size: #{file_size_human(total_s3_size)}, Local size: #{file_size_human(total_local_size)}"
|
|
94
|
+
end
|
|
95
|
+
|
|
96
|
+
# Calculate 3-state S3 sync status by comparing the local s3-staging
# directory against the objects currently stored in S3.
#
# @return [String] One of: '↑ upload' (local changes not in S3),
#   '↓ download' (S3 objects missing locally), '⚠️ both' (changes in
#   both directions), '✓ synced', or 'none' when there is nothing to
#   compare (no staging dir, S3 listing failed, or both sides empty).
def calculate_sync_status
  staging_dir = File.join(project_directory_path, 's3-staging')

  return 'none' unless Dir.exist?(staging_dir)

  begin
    s3_files = list_s3_files
  rescue StandardError
    # Listing failures (missing credentials, network, absent bucket)
    # are reported as "none" rather than raising in a status display.
    return 'none'
  end

  local_files = list_local_files(staging_dir)

  return 'none' if s3_files.empty? && local_files.empty?

  # Index S3 objects by project-relative path for O(1) lookup.
  s3_files_map = s3_files.each_with_object({}) do |file, hash|
    hash[extract_relative_path(file['Key'])] = file
  end

  # A local file needs uploading when it is absent from S3 or differs
  # from the S3 copy (ETag/size comparison via compare_files).
  needs_upload = local_files.keys.any? do |relative_path|
    local_file = File.join(staging_dir, relative_path)
    s3_file = s3_files_map[relative_path]

    if s3_file
      s3_etag = s3_file['ETag'].gsub('"', '')
      compare_files(local_file: local_file, s3_etag: s3_etag, s3_size: s3_file['Size']) != :synced
    else
      true
    end
  end

  # An S3 object needs downloading when it has no local counterpart.
  needs_download = s3_files_map.keys.any? do |relative_path|
    !File.exist?(File.join(staging_dir, relative_path))
  end

  if needs_upload && needs_download
    '⚠️ both'
  elsif needs_upload
    '↑ upload'
  elsif needs_download
    '↓ download'
  else
    '✓ synced'
  end
end
|
|
151
|
+
|
|
152
|
+
# Calculate S3 sync timestamps: the most recent S3 object LastModified
# (treated as the last upload) and the most recent mtime among local
# staging files (treated as the last download/local activity).
#
# @return [Hash] { last_upload: Time|nil, last_download: Time|nil } —
#   both nil when the staging dir is missing or the S3 listing fails.
def sync_timestamps
  staging_dir = File.join(project_directory_path, 's3-staging')

  return { last_upload: nil, last_download: nil } unless Dir.exist?(staging_dir)

  begin
    s3_files = list_s3_files
  rescue StandardError
    # Listing failures (credentials, network) yield unknown timestamps.
    return { last_upload: nil, last_download: nil }
  end

  # compact.max on an empty collection is nil, so no emptiness guard is needed.
  last_upload = s3_files.map { |f| f['LastModified'] }.compact.max

  # staging_dir existence is guaranteed by the guard clause above.
  local_files = Dir.glob(File.join(staging_dir, '**/*')).select { |f| File.file?(f) }
  last_download = local_files.map { |f| File.mtime(f) }.max

  { last_upload: last_upload, last_download: last_download }
end
|
|
175
|
+
end
|
|
176
|
+
end
|
|
177
|
+
end
|
|
178
|
+
end
|
data/lib/appydave/tools.rb
CHANGED
|
@@ -70,6 +70,8 @@ require 'appydave/tools/dam/config_loader'
|
|
|
70
70
|
require 'appydave/tools/dam/s3_base'
|
|
71
71
|
require 'appydave/tools/dam/s3_uploader'
|
|
72
72
|
require 'appydave/tools/dam/s3_downloader'
|
|
73
|
+
require 'appydave/tools/dam/s3_status_checker'
|
|
74
|
+
require 'appydave/tools/dam/s3_archiver'
|
|
73
75
|
require 'appydave/tools/dam/s3_operations'
|
|
74
76
|
require 'appydave/tools/dam/s3_scanner'
|
|
75
77
|
require 'appydave/tools/dam/share_operations'
|
data/package.json
CHANGED
metadata
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: appydave-tools
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 0.77.
|
|
4
|
+
version: 0.77.6
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- David Cruwys
|
|
@@ -373,12 +373,14 @@ files:
|
|
|
373
373
|
- lib/appydave/tools/dam/repo_push.rb
|
|
374
374
|
- lib/appydave/tools/dam/repo_status.rb
|
|
375
375
|
- lib/appydave/tools/dam/repo_sync.rb
|
|
376
|
+
- lib/appydave/tools/dam/s3_archiver.rb
|
|
376
377
|
- lib/appydave/tools/dam/s3_arg_parser.rb
|
|
377
378
|
- lib/appydave/tools/dam/s3_base.rb
|
|
378
379
|
- lib/appydave/tools/dam/s3_downloader.rb
|
|
379
380
|
- lib/appydave/tools/dam/s3_operations.rb
|
|
380
381
|
- lib/appydave/tools/dam/s3_scan_command.rb
|
|
381
382
|
- lib/appydave/tools/dam/s3_scanner.rb
|
|
383
|
+
- lib/appydave/tools/dam/s3_status_checker.rb
|
|
382
384
|
- lib/appydave/tools/dam/s3_uploader.rb
|
|
383
385
|
- lib/appydave/tools/dam/share_operations.rb
|
|
384
386
|
- lib/appydave/tools/dam/ssd_status.rb
|