appydave-tools 0.16.0 → 0.17.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop.yml +6 -0
- data/AGENTS.md +22 -0
- data/CHANGELOG.md +12 -0
- data/CLAUDE.md +206 -51
- data/README.md +144 -11
- data/bin/archive_project.rb +249 -0
- data/bin/configuration.rb +21 -1
- data/bin/generate_manifest.rb +357 -0
- data/bin/sync_from_ssd.rb +236 -0
- data/bin/vat +623 -0
- data/docs/README.md +169 -0
- data/docs/configuration/.env.example +19 -0
- data/docs/configuration/README.md +394 -0
- data/docs/configuration/channels.example.json +26 -0
- data/docs/configuration/settings.example.json +6 -0
- data/docs/development/CODEX-recommendations.md +123 -0
- data/docs/development/README.md +100 -0
- data/docs/development/cli-architecture-patterns.md +1604 -0
- data/docs/development/pattern-comparison.md +284 -0
- data/docs/prd-unified-brands-configuration.md +792 -0
- data/docs/project-brand-systems-analysis.md +934 -0
- data/docs/vat/dam-vision.md +123 -0
- data/docs/vat/session-summary-2025-11-09.md +297 -0
- data/docs/vat/usage.md +508 -0
- data/docs/vat/vat-testing-plan.md +801 -0
- data/lib/appydave/tools/configuration/models/brands_config.rb +238 -0
- data/lib/appydave/tools/configuration/models/config_base.rb +7 -0
- data/lib/appydave/tools/configuration/models/settings_config.rb +4 -0
- data/lib/appydave/tools/vat/config.rb +153 -0
- data/lib/appydave/tools/vat/config_loader.rb +91 -0
- data/lib/appydave/tools/vat/manifest_generator.rb +239 -0
- data/lib/appydave/tools/vat/project_listing.rb +198 -0
- data/lib/appydave/tools/vat/project_resolver.rb +132 -0
- data/lib/appydave/tools/vat/s3_operations.rb +560 -0
- data/lib/appydave/tools/version.rb +1 -1
- data/lib/appydave/tools.rb +9 -1
- data/package.json +1 -1
- metadata +57 -3
- data/docs/dam/overview.md +0 -28
|
@@ -0,0 +1,560 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require 'fileutils'
|
|
4
|
+
require 'json'
|
|
5
|
+
require 'digest'
|
|
6
|
+
require 'aws-sdk-s3'
|
|
7
|
+
|
|
8
|
+
module Appydave
  module Tools
    module Vat
      # S3 operations for VAT projects: upload/download between the local
      # `s3-staging/` folder and S3, sync-status reporting, remote and local
      # cleanup, and archiving a whole project to an external SSD.
      #
      # Collaborators (brand metadata, brand path, S3 client) are injectable
      # for testing; when not supplied they are resolved from configuration.
      class S3Operations
        attr_reader :brand_info, :brand, :project_id, :brand_path, :s3_client

        # @param brand [String] brand key used to resolve configuration
        # @param project_id [String] project folder name under the brand path
        # @param brand_info [Object, nil] injected brand config (responds to #aws, #locations)
        # @param brand_path [String, nil] injected local root folder for the brand
        # @param s3_client [Aws::S3::Client, nil] injected pre-built S3 client
        def initialize(brand, project_id, brand_info: nil, brand_path: nil, s3_client: nil)
          @brand = brand
          @project_id = project_id

          # Use injected dependencies or fall back to configuration lookups.
          @brand_path = brand_path || Config.brand_path(brand)
          @brand_info = brand_info || load_brand_info(brand)
          @s3_client = s3_client || create_s3_client(@brand_info)
        end

        # Upload files from s3-staging/ to S3, skipping files whose local MD5
        # already matches the remote ETag.
        def upload(dry_run: false)
          staging_dir = staging_path

          unless Dir.exist?(staging_dir)
            puts "❌ No s3-staging directory found: #{staging_dir}"
            puts 'Nothing to upload.'
            return
          end

          files = Dir.glob("#{staging_dir}/**/*").select { |f| File.file?(f) }

          if files.empty?
            puts '❌ No files found in s3-staging/'
            return
          end

          puts "📦 Uploading #{files.size} file(s) from #{project_id}/s3-staging/ to S3..."
          puts ''

          uploaded = 0
          skipped = 0
          failed = 0

          files.each do |file|
            relative_path = file.sub("#{staging_dir}/", '')
            s3_path = build_s3_key(relative_path)

            # Skip when the remote copy already matches the local file.
            # NOTE(review): an S3 ETag equals the MD5 only for single-part
            # uploads; multipart-uploaded objects never match and re-upload.
            local_md5 = file_md5(file)
            s3_md5 = s3_file_md5(s3_path)

            if local_md5 == s3_md5
              puts " ⏭️ Skipped: #{relative_path} (unchanged)"
              skipped += 1
            elsif upload_file(file, s3_path, dry_run: dry_run)
              uploaded += 1
            else
              failed += 1
            end
          end

          puts ''
          puts '✅ Upload complete!'
          puts " Uploaded: #{uploaded}, Skipped: #{skipped}, Failed: #{failed}"
        end

        # Download files from S3 into s3-staging/, skipping files whose local
        # MD5 already matches the remote ETag.
        def download(dry_run: false)
          staging_dir = staging_path

          s3_files = list_s3_files

          if s3_files.empty?
            puts "❌ No files found in S3 for #{brand}/#{project_id}"
            return
          end

          puts "📦 Downloading #{s3_files.size} file(s) from S3 to #{project_id}/s3-staging/..."
          puts ''

          downloaded = 0
          skipped = 0
          failed = 0

          s3_files.each do |s3_file|
            key = s3_file['Key']
            relative_path = extract_relative_path(key)
            local_file = File.join(staging_dir, relative_path)

            # Skip when the local copy already matches the remote ETag.
            s3_md5 = s3_file['ETag'].gsub('"', '')
            local_md5 = File.exist?(local_file) ? file_md5(local_file) : nil

            if local_md5 == s3_md5
              puts " ⏭️ Skipped: #{relative_path} (unchanged)"
              skipped += 1
            elsif download_file(key, local_file, dry_run: dry_run)
              downloaded += 1
            else
              failed += 1
            end
          end

          puts ''
          puts '✅ Download complete!'
          puts " Downloaded: #{downloaded}, Skipped: #{skipped}, Failed: #{failed}"
        end

        # Show sync status: for every file known locally or remotely, report
        # whether it is synced, modified, S3-only, or local-only.
        def status
          staging_dir = staging_path

          s3_files = list_s3_files
          local_files = list_local_files(staging_dir)

          # Index S3 objects by project-relative path for O(1) lookup.
          s3_files_map = s3_files.each_with_object({}) do |file, hash|
            hash[extract_relative_path(file['Key'])] = file
          end

          if s3_files.empty? && local_files.empty?
            puts "❌ No files found in S3 or locally for #{brand}/#{project_id}"
            return
          end

          puts "📊 S3 Sync Status for #{brand}/#{project_id}"
          puts ''

          # Union of both sides so one pass covers every file.
          all_paths = (s3_files_map.keys + local_files.keys).uniq.sort

          total_s3_size = 0
          total_local_size = 0

          all_paths.each do |relative_path|
            s3_file = s3_files_map[relative_path]
            local_file = File.join(staging_dir, relative_path)

            if s3_file && File.exist?(local_file)
              # Present on both sides: compare local MD5 against the S3 ETag.
              s3_size = s3_file['Size']
              local_size = File.size(local_file)
              total_s3_size += s3_size
              total_local_size += local_size

              if file_md5(local_file) == s3_file['ETag'].gsub('"', '')
                puts " ✓ #{relative_path} (#{file_size_human(s3_size)}) [synced]"
              else
                puts " ⚠️ #{relative_path} (#{file_size_human(s3_size)}) [modified]"
              end
            elsif s3_file
              # File only in S3.
              s3_size = s3_file['Size']
              total_s3_size += s3_size
              puts " ☁️ #{relative_path} (#{file_size_human(s3_size)}) [S3 only]"
            else
              # File only local.
              local_size = File.size(local_file)
              total_local_size += local_size
              puts " 📁 #{relative_path} (#{file_size_human(local_size)}) [local only]"
            end
          end

          puts ''
          puts "S3 files: #{s3_files.size}, Local files: #{local_files.size}"
          puts "S3 size: #{file_size_human(total_s3_size)}, Local size: #{file_size_human(total_local_size)}"
        end

        # Delete every S3 object for this project. Requires force: true;
        # without it only reports what would be deleted.
        def cleanup(force: false, dry_run: false)
          s3_files = list_s3_files

          if s3_files.empty?
            puts "❌ No files found in S3 for #{brand}/#{project_id}"
            return
          end

          puts "🗑️ Found #{s3_files.size} file(s) in S3 for #{brand}/#{project_id}"
          puts ''

          unless force
            puts '⚠️ This will DELETE all files from S3 for this project.'
            puts 'Use --force to confirm deletion.'
            return
          end

          deleted = 0
          failed = 0

          s3_files.each do |s3_file|
            key = s3_file['Key']
            relative_path = extract_relative_path(key)

            if delete_s3_file(key, dry_run: dry_run)
              puts " ✓ Deleted: #{relative_path}"
              deleted += 1
            else
              puts " ✗ Failed: #{relative_path}"
              failed += 1
            end
          end

          puts ''
          puts '✅ Cleanup complete!'
          puts " Deleted: #{deleted}, Failed: #{failed}"
        end

        # Delete all local files under s3-staging/ (and prune the emptied
        # directories). Requires force: true.
        def cleanup_local(force: false, dry_run: false)
          staging_dir = staging_path

          unless Dir.exist?(staging_dir)
            puts "❌ No s3-staging directory found: #{staging_dir}"
            return
          end

          files = Dir.glob("#{staging_dir}/**/*").select { |f| File.file?(f) }

          if files.empty?
            puts '❌ No files found in s3-staging/'
            return
          end

          puts "🗑️ Found #{files.size} file(s) in #{project_id}/s3-staging/"
          puts ''

          unless force
            puts '⚠️ This will DELETE all local files in s3-staging/ for this project.'
            puts 'Use --force to confirm deletion.'
            return
          end

          deleted = 0
          failed = 0

          files.each do |file|
            relative_path = file.sub("#{staging_dir}/", '')

            if delete_local_file(file, dry_run: dry_run)
              puts " ✓ Deleted: #{relative_path}"
              deleted += 1
            else
              puts " ✗ Failed: #{relative_path}"
              failed += 1
            end
          end

          # Clean up empty directories (deepest first so parents empty out).
          unless dry_run
            Dir.glob("#{staging_dir}/**/").reverse_each do |dir|
              Dir.rmdir(dir) if Dir.empty?(dir)
            rescue SystemCallError
              # Directory not empty, skip
            end
          end

          puts ''
          puts '✅ Local cleanup complete!'
          puts " Deleted: #{deleted}, Failed: #{failed}"
        end

        # Archive the whole project folder to the brand's SSD backup location.
        # The local copy is removed only when force: true.
        def archive(force: false, dry_run: false)
          ssd_backup = brand_info.locations.ssd_backup

          unless ssd_backup && !ssd_backup.empty?
            puts "❌ SSD backup location not configured for brand '#{brand}'"
            return
          end

          unless Dir.exist?(ssd_backup)
            puts "❌ SSD not mounted at #{ssd_backup}"
            puts ' Please connect the SSD before archiving.'
            return
          end

          project_dir = project_path

          unless Dir.exist?(project_dir)
            puts "❌ Project not found: #{project_dir}"
            return
          end

          # Determine SSD destination path.
          ssd_project_dir = File.join(ssd_backup, project_id)

          puts "📦 Archive: #{brand}/#{project_id}"
          puts ''

          # Step 1: copy to SSD; Step 2: delete local copy only with --force.
          if copy_to_ssd(project_dir, ssd_project_dir, dry_run: dry_run)
            if force
              delete_local_project(project_dir, dry_run: dry_run)
            else
              puts ''
              puts '⚠️ Project copied to SSD but NOT deleted locally.'
              puts ' Use --force to delete local copy after archiving.'
            end
          end

          puts ''
          puts dry_run ? '✅ Archive dry-run complete!' : '✅ Archive complete!'
        end

        # Build the full S3 key for a project-relative path.
        def build_s3_key(relative_path)
          "#{brand_info.aws.s3_prefix}#{project_id}/#{relative_path}"
        end

        # Inverse of #build_s3_key: strip the brand prefix and project id.
        def extract_relative_path(s3_key)
          s3_key.sub("#{brand_info.aws.s3_prefix}#{project_id}/", '')
        end

        # MD5 hex digest of a local file.
        def file_md5(file_path)
          Digest::MD5.file(file_path).hexdigest
        end

        # MD5 of an S3 object derived from its ETag; nil when the object is
        # missing or the request fails. (NotFound is a ServiceError subclass,
        # so a single rescue covers both.)
        def s3_file_md5(s3_path)
          response = s3_client.head_object(
            bucket: brand_info.aws.s3_bucket,
            key: s3_path
          )
          response.etag.gsub('"', '')
        rescue Aws::S3::Errors::ServiceError
          nil
        end

        # Upload a single file to S3. Returns true on success.
        def upload_file(local_file, s3_path, dry_run: false)
          if dry_run
            puts " [DRY-RUN] Would upload: #{local_file} → s3://#{brand_info.aws.s3_bucket}/#{s3_path}"
            return true
          end

          # Stream the file rather than loading it into memory.
          File.open(local_file, 'rb') do |file|
            s3_client.put_object(
              bucket: brand_info.aws.s3_bucket,
              key: s3_path,
              body: file
            )
          end

          puts " ✓ Uploaded: #{File.basename(local_file)} (#{file_size_human(File.size(local_file))})"
          true
        rescue Aws::S3::Errors::ServiceError => e
          puts " ✗ Failed: #{File.basename(local_file)}"
          puts " Error: #{e.message}"
          false
        end

        # Download a single object from S3. Returns true on success.
        def download_file(s3_key, local_file, dry_run: false)
          if dry_run
            puts " [DRY-RUN] Would download: s3://#{brand_info.aws.s3_bucket}/#{s3_key} → #{local_file}"
            return true
          end

          FileUtils.mkdir_p(File.dirname(local_file))

          # response_target streams the body straight to disk.
          s3_client.get_object(
            bucket: brand_info.aws.s3_bucket,
            key: s3_key,
            response_target: local_file
          )

          puts " ✓ Downloaded: #{File.basename(local_file)} (#{file_size_human(File.size(local_file))})"
          true
        rescue Aws::S3::Errors::ServiceError => e
          puts " ✗ Failed: #{File.basename(local_file)}"
          puts " Error: #{e.message}"
          false
        end

        # Delete a single object from S3. Returns true on success.
        def delete_s3_file(s3_key, dry_run: false)
          if dry_run
            puts " [DRY-RUN] Would delete: s3://#{brand_info.aws.s3_bucket}/#{s3_key}"
            return true
          end

          s3_client.delete_object(
            bucket: brand_info.aws.s3_bucket,
            key: s3_key
          )

          true
        rescue Aws::S3::Errors::ServiceError => e
          puts " Error: #{e.message}"
          false
        end

        # Delete a single local file. Returns true on success.
        def delete_local_file(file_path, dry_run: false)
          if dry_run
            puts " [DRY-RUN] Would delete: #{file_path}"
            return true
          end

          File.delete(file_path)
          true
        rescue StandardError => e
          puts " Error: #{e.message}"
          false
        end

        # List every S3 object for this project as hashes with 'Key', 'Size'
        # and 'ETag'. Follows continuation tokens so projects with more than
        # 1000 objects (the list_objects_v2 page limit) are fully listed.
        # Returns [] on service errors.
        def list_s3_files
          prefix = build_s3_key('')
          results = []
          continuation = nil

          loop do
            response = s3_client.list_objects_v2(
              bucket: brand_info.aws.s3_bucket,
              prefix: prefix,
              continuation_token: continuation
            )

            (response.contents || []).each do |obj|
              results << { 'Key' => obj.key, 'Size' => obj.size, 'ETag' => obj.etag }
            end

            break unless response.is_truncated

            continuation = response.next_continuation_token
          end

          results
        rescue Aws::S3::Errors::ServiceError
          []
        end

        # Map of relative path => absolute path for files under staging_dir.
        def list_local_files(staging_dir)
          return {} unless Dir.exist?(staging_dir)

          Dir.glob("#{staging_dir}/**/*")
             .select { |f| File.file?(f) }
             .each_with_object({}) { |file, hash| hash[file.sub("#{staging_dir}/", '')] = file }
        end

        # Human-readable file size (B / KB / MB / GB, powers of 1024).
        def file_size_human(bytes)
          if bytes < 1024
            "#{bytes} B"
          elsif bytes < 1024 * 1024
            "#{(bytes / 1024.0).round(1)} KB"
          elsif bytes < 1024 * 1024 * 1024
            "#{(bytes / (1024.0 * 1024)).round(1)} MB"
          else
            "#{(bytes / (1024.0 * 1024 * 1024)).round(2)} GB"
          end
        end

        # Copy the whole project folder to the SSD. An already-existing
        # destination is treated as success (copy step skipped).
        def copy_to_ssd(source_dir, dest_dir, dry_run: false)
          if Dir.exist?(dest_dir)
            puts '⚠️ Already exists on SSD'
            puts " Path: #{dest_dir}"
            puts ' Skipping copy step'
            return true
          end

          size = calculate_directory_size(source_dir)
          puts '📋 Copy to SSD:'
          puts " Source: #{source_dir}"
          puts " Dest: #{dest_dir}"
          puts " Size: #{file_size_human(size)}"
          puts ''

          if dry_run
            puts ' [DRY-RUN] Would copy entire project to SSD'
            return true
          end

          FileUtils.mkdir_p(File.dirname(dest_dir))
          FileUtils.cp_r(source_dir, dest_dir, preserve: true)
          puts ' ✅ Copied to SSD'

          true
        rescue StandardError => e
          puts " ✗ Failed to copy: #{e.message}"
          false
        end

        # Delete the local project directory, reporting the space reclaimed.
        def delete_local_project(project_dir, dry_run: false)
          size = calculate_directory_size(project_dir)

          puts ''
          puts '🗑️ Delete local project:'
          puts " Path: #{project_dir}"
          puts " Size: #{file_size_human(size)}"
          puts ''

          if dry_run
            puts ' [DRY-RUN] Would delete entire local folder'
            return true
          end

          FileUtils.rm_rf(project_dir)
          puts ' ✅ Deleted local folder'
          puts " 💾 Freed: #{file_size_human(size)}"

          true
        rescue StandardError => e
          puts " ✗ Failed to delete: #{e.message}"
          false
        end

        # Total size in bytes of every file under dir_path, dotfiles included.
        def calculate_directory_size(dir_path)
          Dir.glob(File.join(dir_path, '**', '*'), File::FNM_DOTMATCH)
             .sum { |file| File.file?(file) ? File.size(file) : 0 }
        end

        private

        # Resolve brand metadata from the global configuration.
        def load_brand_info(brand)
          Appydave::Tools::Configuration::Config.configure
          Appydave::Tools::Configuration::Config.brands.get_brand(brand)
        end

        # Build an S3 client from the brand's AWS shared-credentials profile.
        def create_s3_client(brand_info)
          profile_name = brand_info.aws.profile
          raise "AWS profile not configured for brand '#{brand}'" if profile_name.nil? || profile_name.empty?

          credentials = Aws::SharedCredentials.new(profile_name: profile_name)
          Aws::S3::Client.new(
            credentials: credentials,
            region: brand_info.aws.region,
            http_wire_trace: false,
            ssl_verify_peer: true,
            ssl_ca_bundle: '/etc/ssl/cert.pem' # macOS system certificates
          )
        end

        # Local project folder: <brand_path>/<project_id>
        def project_path
          File.join(brand_path, project_id)
        end

        # Local staging folder: <brand_path>/<project_id>/s3-staging
        def staging_path
          File.join(project_path, 's3-staging')
        end
      end
    end
  end
end
|
data/lib/appydave/tools.rb
CHANGED
|
@@ -42,6 +42,7 @@ require 'appydave/tools/configuration/configurable'
|
|
|
42
42
|
require 'appydave/tools/configuration/config'
|
|
43
43
|
require 'appydave/tools/configuration/models/config_base'
|
|
44
44
|
require 'appydave/tools/configuration/models/settings_config'
|
|
45
|
+
require 'appydave/tools/configuration/models/brands_config'
|
|
45
46
|
require 'appydave/tools/configuration/models/channels_config'
|
|
46
47
|
require 'appydave/tools/configuration/models/youtube_automation_config'
|
|
47
48
|
require 'appydave/tools/name_manager/project_name'
|
|
@@ -51,6 +52,13 @@ require 'appydave/tools/prompt_tools/prompt_completion'
|
|
|
51
52
|
require 'appydave/tools/subtitle_processor/clean'
|
|
52
53
|
require 'appydave/tools/subtitle_processor/join'
|
|
53
54
|
|
|
55
|
+
require 'appydave/tools/vat/config'
|
|
56
|
+
require 'appydave/tools/vat/project_resolver'
|
|
57
|
+
require 'appydave/tools/vat/config_loader'
|
|
58
|
+
require 'appydave/tools/vat/s3_operations'
|
|
59
|
+
require 'appydave/tools/vat/project_listing'
|
|
60
|
+
require 'appydave/tools/vat/manifest_generator'
|
|
61
|
+
|
|
54
62
|
require 'appydave/tools/youtube_automation/gpt_agent'
|
|
55
63
|
|
|
56
64
|
require 'appydave/tools/youtube_manager/models/youtube_details'
|
|
@@ -65,7 +73,7 @@ require 'appydave/tools/youtube_manager/reports/video_content_report'
|
|
|
65
73
|
Appydave::Tools::Configuration::Config.set_default do |config|
|
|
66
74
|
config.config_path = File.expand_path('~/.config/appydave')
|
|
67
75
|
config.register(:settings, Appydave::Tools::Configuration::Models::SettingsConfig)
|
|
68
|
-
|
|
76
|
+
config.register(:brands, Appydave::Tools::Configuration::Models::BrandsConfig)
|
|
69
77
|
config.register(:channels, Appydave::Tools::Configuration::Models::ChannelsConfig)
|
|
70
78
|
config.register(:youtube_automation, Appydave::Tools::Configuration::Models::YoutubeAutomationConfig)
|
|
71
79
|
end
|