appydave-tools 0.20.1 → 0.21.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +7 -0
- data/bin/bank_reconciliation.rb +0 -1
- data/bin/configuration.rb +0 -1
- data/bin/dam +257 -15
- data/docs/dam/dam-testing-plan.md +39 -70
- data/docs/dam/prd-client-sharing.md +693 -0
- data/docs/dam/windows/README.md +40 -0
- data/lib/appydave/tools/dam/manifest_generator.rb +89 -36
- data/lib/appydave/tools/dam/repo_push.rb +1 -1
- data/lib/appydave/tools/dam/repo_status.rb +30 -8
- data/lib/appydave/tools/dam/s3_operations.rb +64 -8
- data/lib/appydave/tools/dam/share_operations.rb +234 -0
- data/lib/appydave/tools/dam/status.rb +8 -4
- data/lib/appydave/tools/version.rb +1 -1
- data/lib/appydave/tools.rb +1 -0
- data/package.json +1 -1
- metadata +7 -7
- data/docs/SESSION-SUMMARY-WINDOWS-PREP.md +0 -340
- data/docs/WINDOWS-COMPATIBILITY-REPORT.md +0 -429
- data/docs/WINDOWS-START-HERE.md +0 -202
- /data/docs/dam/{windows-testing-guide.md → windows/dam-testing-plan-windows-powershell.md} +0 -0
- /data/docs/{WINDOWS-SETUP.md → dam/windows/installation.md} +0 -0
data/lib/appydave/tools/dam/manifest_generator.rb
CHANGED

@@ -35,13 +35,8 @@ module Appydave
           # Collect all unique project IDs from both locations
           all_project_ids = collect_project_ids(ssd_backup, ssd_available)
 
-          if all_project_ids.empty?
-
-            return { success: false, brand: brand, path: nil }
-          end
-
-          # Build project entries
-          projects = build_project_entries(all_project_ids, ssd_backup, ssd_available)
+          # Build project entries (empty array if no projects)
+          projects = all_project_ids.empty? ? [] : build_project_entries(all_project_ids, ssd_backup, ssd_available)
 
           # Calculate disk usage
           disk_usage = calculate_disk_usage(projects, ssd_backup)
@@ -93,9 +88,9 @@ module Appydave
             # Scan projects within SSD range folders
             Dir.glob(File.join(ssd_path, '*/')).each do |project_path|
               project_id = File.basename(project_path)
-              all_project_ids << project_id if
+              all_project_ids << project_id if valid_project_folder?(project_path)
             end
-          elsif
+          elsif valid_project_folder?(ssd_path)
             # Direct project in SSD root (legacy structure)
             all_project_ids << basename
           end
@@ -109,7 +104,7 @@ module Appydave
             next if basename.start_with?('.', '_')
             next if %w[s3-staging archived final].include?(basename)
 
-            all_project_ids << basename if
+            all_project_ids << basename if valid_project_folder?(path)
           end
 
           # Scan archived structure (restored/archived projects)
@@ -120,7 +115,7 @@ module Appydave
             # Scan projects within each range folder
             Dir.glob(File.join(range_folder, '*/')).each do |project_path|
               basename = File.basename(project_path)
-              all_project_ids << basename if
+              all_project_ids << basename if valid_project_folder?(project_path)
             end
           end
         end
@@ -161,23 +156,22 @@ module Appydave
           s3_staging_path = File.join(local_path, 's3-staging')
           s3_exists = local_exists && Dir.exist?(s3_staging_path)
 
-          #
-
-          has_storyline_json = local_exists && File.exist?(storyline_json_path)
+          # Determine project type
+          type = determine_project_type(local_path, project_id, local_exists)
 
-          # Check SSD (try
+          # Check SSD (try flat, calculated range, and search all range folders)
           ssd_exists = if ssd_available
                          flat_ssd_path = File.join(ssd_backup, project_id)
                          range_ssd_path = File.join(ssd_backup, range, project_id)
-
+
+                         Dir.exist?(flat_ssd_path) || Dir.exist?(range_ssd_path) || find_project_in_ssd_ranges?(ssd_backup, project_id)
                        else
                          false
                        end
 
           {
             id: project_id,
-            type:
-            hasStorylineJson: has_storyline_json,
+            type: type,
             storage: {
               ssd: {
                 exists: ssd_exists,
@@ -215,15 +209,9 @@ module Appydave
 
             next unless project[:storage][:ssd][:exists]
 
-            #
-
-
-              ssd_bytes += calculate_directory_size(flat_ssd_path)
-            else
-              range = determine_range(project[:id])
-              range_ssd_path = File.join(ssd_backup, range, project[:id])
-              ssd_bytes += calculate_directory_size(range_ssd_path) if Dir.exist?(range_ssd_path)
-            end
+            # Find actual SSD path (flat, calculated range, or search)
+            ssd_path = find_ssd_project_path(ssd_backup, project[:id])
+            ssd_bytes += calculate_directory_size(ssd_path) if ssd_path
           end
 
           {
@@ -254,8 +242,10 @@ module Appydave
           puts '🔍 Running validations...'
           warnings = []
 
+          # Check for projects with no storage locations
           projects.each do |project|
-
+            no_storage = !project[:storage][:local][:exists] && !project[:storage][:ssd][:exists]
+            warnings << "⚠️ Project has no storage: #{project[:id]}" if no_storage
           end
 
           if warnings.empty?
@@ -268,6 +258,42 @@ module Appydave
 
         # Helper methods
 
+        # Search for project in SSD range folders
+        # @param ssd_backup [String] SSD backup base path
+        # @param project_id [String] Project ID to find
+        # @return [Boolean] true if project found in any range folder
+        def find_project_in_ssd_ranges?(ssd_backup, project_id)
+          !find_ssd_project_path(ssd_backup, project_id).nil?
+        end
+
+        # Find actual SSD path for project
+        # @param ssd_backup [String] SSD backup base path
+        # @param project_id [String] Project ID to find
+        # @return [String, nil] Full path to project or nil if not found
+        def find_ssd_project_path(ssd_backup, project_id)
+          return nil unless Dir.exist?(ssd_backup)
+
+          # Try flat structure first
+          flat_path = File.join(ssd_backup, project_id)
+          return flat_path if Dir.exist?(flat_path)
+
+          # Try calculated range
+          range = determine_range(project_id)
+          range_path = File.join(ssd_backup, range, project_id)
+          return range_path if Dir.exist?(range_path)
+
+          # Search all range folders
+          Dir.glob(File.join(ssd_backup, '*/')).each do |range_folder_path|
+            range_name = File.basename(range_folder_path)
+            next unless range_folder?(range_name)
+
+            project_path = File.join(range_folder_path, project_id)
+            return project_path if Dir.exist?(project_path)
+          end
+
+          nil
+        end
+
         # Determine range folder for project
         # Both SSD and local archived use 50-number ranges with letter prefixes:
         # b00-b49, b50-b99, a01-a49, a50-a99
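The new helpers give SSD lookups a fixed fallback order: the flat path first, then the range folder calculated from the project ID, then a scan of every remaining range folder. A minimal usage sketch, with hypothetical SSD paths and project IDs (not from the package):

```ruby
# Assuming a hypothetical SSD layout:
#   /Volumes/ssd/b65-intro          <- flat structure (checked first)
#   /Volumes/ssd/b50-b99/b65-intro  <- calculated range (checked second)
#   /Volumes/ssd/a01-a49/...        <- other range folders (scanned last)

find_ssd_project_path('/Volumes/ssd', 'b65-intro')
# => '/Volumes/ssd/b65-intro' if the flat copy exists,
#    else the first range folder containing the project, else nil

find_project_in_ssd_ranges?('/Volumes/ssd', 'b65-intro')
# => true or false; just a nil check over the same lookup
```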
@@ -287,20 +313,47 @@ module Appydave
           end
         end
 
-
-
-
-
-
+        # Check if folder is a valid project (permissive - any folder except infrastructure)
+        def valid_project_folder?(project_path)
+          basename = File.basename(project_path)
+
+          # Exclude infrastructure directories
+          excluded = %w[archived docs node_modules .git .github s3-staging final]
+          return false if excluded.include?(basename)
+
+          # Exclude hidden and underscore-prefixed
+          return false if basename.start_with?('.', '_')
+
+          true
+        end
+
+        # Determine project type based on content and naming
+        def determine_project_type(local_path, project_id, local_exists)
+          # 1. Check for storyline.json (highest priority)
+          if local_exists
+            storyline_json_path = File.join(local_path, 'data', 'storyline.json')
+            return 'storyline' if File.exist?(storyline_json_path)
+          end
+
+          # 2. Check for FliVideo pattern (letter + 2 digits + dash + name)
+          return 'flivideo' if project_id =~ /^[a-z]\d{2}-/
+
+          # 3. Check for legacy pattern (starts with digit)
+          return 'flivideo' if project_id =~ /^\d/
+
+          # 4. Everything else is general
+          'general'
         end
 
         def range_folder?(folder_name)
-          # Range folder patterns
-          # - b00-b49, b50-b99, a00-a49, a50-a99 (letter + 2 digits + dash + same letter + 2 digits)
+          # Range folder patterns:
           # - 000-099 (3 digits + dash + 3 digits)
-          # Must match: same letter on both sides (b00-b49, not b00-a49)
           return true if folder_name =~ /^\d{3}-\d{3}$/
 
+          # - a1-20, a21-40, b50-99 (letter + digits + dash + digits)
+          return true if folder_name =~ /^[a-z]\d+-\d+$/
+
+          # - b00-b49 (letter + 2 digits + dash + same letter + 2 digits)
           if folder_name =~ /^([a-z])(\d{2})-([a-z])(\d{2})$/
             letter1 = Regexp.last_match(1)
             letter2 = Regexp.last_match(3)
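Worked examples for the two new classifiers, using invented project IDs; a local `data/storyline.json` always wins, and the letter-pair branch of `range_folder?` requires the same letter on both sides (per the removed comment):

```ruby
determine_project_type(path, 'b65-intro', true)     # => 'storyline' if path/data/storyline.json exists
determine_project_type(path, 'b65-intro', false)    # => 'flivideo' (letter + 2 digits + dash)
determine_project_type(path, '42-legacy', false)    # => 'flivideo' (legacy: starts with digit)
determine_project_type(path, 'brand-assets', false) # => 'general'

range_folder?('000-099') # => true  (3 digits + dash + 3 digits)
range_folder?('a1-20')   # => true  (letter + digits + dash + digits)
range_folder?('b00-b49') # => true  (same letter on both sides)
range_folder?('b00-a49') # => false (letters differ)
```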
data/lib/appydave/tools/dam/repo_push.rb
CHANGED

@@ -51,7 +51,7 @@ module Appydave
         end
 
         # Resolve short name if needed (b65 -> b65-full-name)
-        resolved = ProjectResolver.
+        resolved = ProjectResolver.resolve(brand, project_id)
 
         project_entry = manifest[:projects].find { |p| p[:id] == resolved }
         if project_entry
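For context, `ProjectResolver.resolve` expands a short ID to the full project folder name; the full name below is hypothetical:

```ruby
ProjectResolver.resolve('appydave', 'b65')
# => 'b65-some-full-name' when a matching folder exists

ProjectResolver.resolve('appydave', 'b65-some-full-name')
# presumably returned unchanged when already fully qualified
```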
data/lib/appydave/tools/dam/repo_status.rb
CHANGED

@@ -67,16 +67,15 @@ module Appydave
           puts "#{indent}🌿 Branch: #{status[:branch]}"
           puts "#{indent}📡 Remote: #{status[:remote]}" if status[:remote]
 
-
-
-
-          puts "#{indent}
-
-
-          if status[:ahead].positive? || status[:behind].positive?
+          # Priority logic: Show EITHER changes with file list OR sync status
+          # Check if repo has uncommitted changes (matches old script: git diff-index --quiet HEAD --)
+          if uncommitted_changes?
+            puts "#{indent}⚠️ Has uncommitted changes:"
+            show_file_list(indent: indent)
+          elsif status[:ahead].positive? || status[:behind].positive?
             puts "#{indent}🔄 Sync: #{sync_status_text(status[:ahead], status[:behind])}"
           else
-            puts "#{indent}✓
+            puts "#{indent}✓ Clean - up to date with remote"
           end
         end
 
@@ -134,6 +133,29 @@ module Appydave
         rescue StandardError
           0
         end
+
+        # Check if repo has uncommitted changes (matches old script: git diff-index --quiet HEAD --)
+        def uncommitted_changes?
+          # git diff-index returns 0 if clean, 1 if there are changes
+          system("git -C \"#{brand_path}\" diff-index --quiet HEAD -- 2>/dev/null")
+          !$CHILD_STATUS.success?
+        rescue StandardError
+          false
+        end
+
+        # Show file list using git status --short (matches old script)
+        def show_file_list(indent: '')
+          output = `git -C "#{brand_path}" status --short 2>/dev/null`.strip
+          return if output.empty?
+
+          # Add indentation to each line (matches old script: sed 's/^/ /')
+          file_indent = "#{indent} "
+          output.lines.each do |line|
+            puts "#{file_indent}#{line.strip}"
+          end
+        rescue StandardError
+          # Silently fail if git status fails
+        end
       end
     end
   end
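`$CHILD_STATUS` is the English-library alias for Ruby's `$?`, so this method depends on `require 'English'` somewhere in the gem. A self-contained sketch of the same clean/dirty check:

```ruby
require 'English' # provides $CHILD_STATUS as an alias for $?

def uncommitted_changes?(repo_path)
  # Exit status 0 = clean working tree, 1 = uncommitted changes
  system("git -C \"#{repo_path}\" diff-index --quiet HEAD --")
  !$CHILD_STATUS.success?
end

puts uncommitted_changes?(Dir.pwd) ? 'dirty' : 'clean'
```

Note that `system` itself returns true/false based on the exit status, so the explicit `$CHILD_STATUS` check is a stylistic choice rather than a functional necessity.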
data/lib/appydave/tools/dam/s3_operations.rb
CHANGED

@@ -10,7 +10,7 @@ module Appydave
     module Dam
       # S3 operations for VAT (upload, download, status, cleanup)
       class S3Operations
-        attr_reader :brand_info, :brand, :project_id, :brand_path
+        attr_reader :brand_info, :brand, :project_id, :brand_path
 
         # Directory patterns to exclude from archive/upload (generated/installable content)
         EXCLUDE_PATTERNS = %w[
@@ -35,7 +35,12 @@ module Appydave
          @brand_info = brand_info || load_brand_info(brand)
          @brand = @brand_info.key # Use resolved brand key, not original input
          @brand_path = brand_path || Config.brand_path(@brand)
-          @
+          @s3_client_override = s3_client # Store override but don't create client yet (lazy loading)
+        end
+
+        # Lazy-load S3 client (only create when actually needed, not for dry-run)
+        def s3_client
+          @s3_client ||= @s3_client_override || create_s3_client(@brand_info)
         end
 
         private
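Because the client sits behind a memoized reader, constructing the class never touches AWS; credentials are only resolved on the first real S3 call, and tests can inject a double. A sketch (constructor arguments assumed to mirror `ShareOperations` below):

```ruby
ops = Appydave::Tools::Dam::S3Operations.new('appydave', 'b65-intro')
# No Aws::S3::Client has been built yet, so dry-run paths need no credentials.

fake = Object.new
ops = Appydave::Tools::Dam::S3Operations.new('appydave', 'b65-intro', s3_client: fake)
ops.s3_client # => fake; create_s3_client is never called
```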
@@ -50,17 +55,32 @@ module Appydave
          raise "AWS profile not configured for brand '#{brand}'" if profile_name.nil? || profile_name.empty?
 
          credentials = Aws::SharedCredentials.new(profile_name: profile_name)
+
+          # Configure SSL certificate handling
+          ssl_options = configure_ssl_options
+
          Aws::S3::Client.new(
            credentials: credentials,
            region: brand_info.aws.region,
-            http_wire_trace: false
-
-            # - Windows: Uses Windows Certificate Store
-            # - macOS: Finds system certificates automatically
-            # - Linux: Finds OpenSSL certificates
+            http_wire_trace: false,
+            **ssl_options
          )
        end
 
+        def configure_ssl_options
+          # Check for explicit SSL verification bypass (for development/testing)
+          if ENV['AWS_SDK_RUBY_SKIP_SSL_VERIFICATION'] == 'true'
+            puts '⚠️ WARNING: SSL verification is disabled (development mode)'
+            return { ssl_verify_peer: false }
+          end
+
+          # Disable SSL peer verification to work around OpenSSL 3.4.x CRL checking issues
+          # This is safe for AWS S3 connections as we're still using HTTPS (encrypted connection)
+          {
+            ssl_verify_peer: false
+          }
+        end
+
        public
 
        # Upload files from s3-staging/ to S3
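The bypass is keyed on an environment variable, and note that as this hunk is written both branches return `ssl_verify_peer: false`; the variable only controls whether the warning prints. A sketch of the opt-in path:

```ruby
# The value must be the exact string 'true'
ENV['AWS_SDK_RUBY_SKIP_SSL_VERIFICATION'] = 'true'

configure_ssl_options
# prints: ⚠️ WARNING: SSL verification is disabled (development mode)
# => { ssl_verify_peer: false }
```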
@@ -390,11 +410,15 @@ module Appydave
            return true
          end
 
+          # Detect MIME type for proper browser handling
+          content_type = detect_content_type(local_file)
+
          File.open(local_file, 'rb') do |file|
            s3_client.put_object(
              bucket: brand_info.aws.s3_bucket,
              key: s3_path,
-              body: file
+              body: file,
+              content_type: content_type
            )
          end
@@ -406,6 +430,38 @@ module Appydave
          false
        end
 
+        def detect_content_type(filename)
+          ext = File.extname(filename).downcase
+          case ext
+          when '.mp4'
+            'video/mp4'
+          when '.mov'
+            'video/quicktime'
+          when '.avi'
+            'video/x-msvideo'
+          when '.mkv'
+            'video/x-matroska'
+          when '.webm'
+            'video/webm'
+          when '.m4v'
+            'video/x-m4v'
+          when '.jpg', '.jpeg'
+            'image/jpeg'
+          when '.png'
+            'image/png'
+          when '.gif'
+            'image/gif'
+          when '.pdf'
+            'application/pdf'
+          when '.json'
+            'application/json'
+          when '.srt', '.vtt', '.txt', '.md'
+            'text/plain'
+          else
+            'application/octet-stream'
+          end
+        end
+
        # Download file from S3
        def download_file(s3_key, local_file, dry_run: false)
          if dry_run
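Quick examples of the mapping; matching is case-insensitive thanks to the `downcase`, and unknown extensions fall back to the generic binary type:

```ruby
detect_content_type('final-cut.mp4') # => 'video/mp4'
detect_content_type('THUMB.PNG')     # => 'image/png'
detect_content_type('captions.srt')  # => 'text/plain'
detect_content_type('bundle.zip')    # => 'application/octet-stream' (fallback)
```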
data/lib/appydave/tools/dam/share_operations.rb
ADDED

@@ -0,0 +1,234 @@
+# frozen_string_literal: true
+
+require 'clipboard'
+require 'aws-sdk-s3'
+
+module Appydave
+  module Tools
+    module Dam
+      # Generate shareable pre-signed URLs for S3 files
+      class ShareOperations
+        attr_reader :brand, :project, :brand_info, :brand_path
+
+        def initialize(brand, project, brand_info: nil, brand_path: nil, s3_client: nil)
+          @project = project
+
+          # Use injected dependencies or load from configuration
+          @brand_info = brand_info || load_brand_info(brand)
+          @brand = @brand_info.key # Use resolved brand key, not original input
+          @brand_path = brand_path || Config.brand_path(@brand)
+          @s3_client_override = s3_client # Store override but don't create client yet (lazy loading)
+        end
+
+        # Lazy-load S3 client (only create when actually needed)
+        def s3_client
+          @s3_client ||= @s3_client_override || create_s3_client(@brand_info)
+        end
+
+        # Generate shareable link for file(s)
+        # @param files [String, Array<String>] File name(s) to share
+        # @param expires [String] Expiry time (e.g., '7d', '24h')
+        # @param download [Boolean] Force download vs inline viewing (default: false for inline)
+        # @return [Hash] Result with :success, :urls, :expiry keys
+        def generate_links(files:, expires: '7d', download: false)
+          expires_in = parse_expiry(expires)
+          expiry_time = Time.now + expires_in
+
+          file_list = Array(files)
+          urls = []
+
+          file_list.each do |file|
+            s3_key = build_s3_key(file)
+
+            # Check if file exists in S3
+            unless file_exists_in_s3?(s3_key)
+              puts "⚠️ File not found in S3: #{file}"
+              puts "   Upload first with: dam s3-up #{brand} #{project}"
+              next
+            end
+
+            url = generate_presigned_url(s3_key, expires_in, download: download)
+            urls << { file: file, url: url }
+          end
+
+          return { success: false, error: 'No files found in S3' } if urls.empty?
+
+          # Show output
+          show_results(urls, expiry_time, download: download)
+
+          # Copy to clipboard
+          copy_to_clipboard(urls)
+
+          { success: true, urls: urls, expiry: expiry_time }
+        end
+
+        private
+
+        def load_brand_info(brand)
+          Appydave::Tools::Configuration::Config.configure
+          Appydave::Tools::Configuration::Config.brands.get_brand(brand)
+        end
+
+        def create_s3_client(brand_info)
+          profile_name = brand_info.aws.profile
+          raise "AWS profile not configured for brand '#{brand}'" if profile_name.nil? || profile_name.empty?
+
+          credentials = Aws::SharedCredentials.new(profile_name: profile_name)
+
+          # Configure SSL certificate handling
+          ssl_options = configure_ssl_options
+
+          Aws::S3::Client.new(
+            credentials: credentials,
+            region: brand_info.aws.region,
+            http_wire_trace: false,
+            **ssl_options
+          )
+        end
+
+        def configure_ssl_options
+          # Check for explicit SSL verification bypass (for development/testing)
+          if ENV['AWS_SDK_RUBY_SKIP_SSL_VERIFICATION'] == 'true'
+            puts '⚠️ WARNING: SSL verification is disabled (development mode)'
+            return { ssl_verify_peer: false }
+          end
+
+          # Disable SSL peer verification to work around OpenSSL 3.4.x CRL checking issues
+          # This is safe for AWS S3 connections as we're still using HTTPS (encrypted connection)
+          {
+            ssl_verify_peer: false
+          }
+        end
+
+        public
+
+        def build_s3_key(file)
+          # S3 key format: staging/v-brand/project/file
+          "staging/v-#{brand}/#{project}/#{file}"
+        end
+
+        def file_exists_in_s3?(s3_key)
+          s3_client.head_object(bucket: brand_info.aws.s3_bucket, key: s3_key)
+          true
+        rescue Aws::S3::Errors::NotFound
+          false
+        end
+
+        def generate_presigned_url(s3_key, expires_in_seconds, download: false)
+          presigner = Aws::S3::Presigner.new(client: s3_client)
+
+          # Extract just the filename
+          filename = File.basename(s3_key)
+
+          # Use 'attachment' to force download, 'inline' to view in browser
+          disposition = download ? 'attachment' : 'inline'
+
+          # Detect MIME type from file extension
+          content_type = detect_content_type(filename)
+
+          presigner.presigned_url(
+            :get_object,
+            bucket: brand_info.aws.s3_bucket,
+            key: s3_key,
+            expires_in: expires_in_seconds,
+            response_content_disposition: "#{disposition}; filename=\"#{filename}\"",
+            response_content_type: content_type
+          )
+        end
+
+        def detect_content_type(filename)
+          ext = File.extname(filename).downcase
+          case ext
+          when '.mp4'
+            'video/mp4'
+          when '.mov'
+            'video/quicktime'
+          when '.avi'
+            'video/x-msvideo'
+          when '.mkv'
+            'video/x-matroska'
+          when '.webm'
+            'video/webm'
+          when '.m4v'
+            'video/x-m4v'
+          when '.jpg', '.jpeg'
+            'image/jpeg'
+          when '.png'
+            'image/png'
+          when '.gif'
+            'image/gif'
+          when '.pdf'
+            'application/pdf'
+          when '.json'
+            'application/json'
+          when '.srt', '.vtt', '.txt', '.md'
+            'text/plain'
+          else
+            'application/octet-stream'
+          end
+        end
+
+        def parse_expiry(expiry_string)
+          case expiry_string
+          when /^(\d+)h$/
+            hours = ::Regexp.last_match(1).to_i
+            raise ArgumentError, 'Expiry must be at least 1 hour' if hours < 1
+            raise ArgumentError, 'Expiry cannot exceed 168 hours (7 days)' if hours > 168
+
+            hours * 3600
+          when /^(\d+)d$/
+            days = ::Regexp.last_match(1).to_i
+            raise ArgumentError, 'Expiry must be at least 1 day' if days < 1
+            raise ArgumentError, 'Expiry cannot exceed 7 days' if days > 7
+
+            days * 86_400
+          else
+            raise ArgumentError, "Invalid expiry format. Use: 24h, 7d, etc. (got: #{expiry_string})"
+          end
+        end
+
+        def show_results(urls, expiry_time, download: false)
+          puts ''
+          mode = download ? '📤 Shareable Link(s) - Download Mode' : '🎬 Shareable Link(s) - View in Browser'
+          puts mode
+          puts ''
+
+          urls.each do |item|
+            puts "📄 #{item[:file]}"
+            puts "   #{item[:url]}"
+            puts ''
+          end
+
+          expiry_date = expiry_time.strftime('%Y-%m-%d %H:%M:%S %Z')
+          puts "⏰ Expires: #{expiry_date}"
+          puts "   (#{format_time_remaining(expiry_time)})"
+        end
+
+        def format_time_remaining(expiry_time)
+          seconds = expiry_time - Time.now
+          days = (seconds / 86_400).floor
+          hours = ((seconds % 86_400) / 3600).floor
+
+          if days.positive?
+            "in #{days} day#{'s' if days > 1}"
+          else
+            "in #{hours} hour#{'s' if hours > 1}"
+          end
+        end
+
+        def copy_to_clipboard(urls)
+          # Copy all URLs to clipboard (newline-separated)
+          text = urls.map { |item| item[:url] }.join("\n")
+
+          Clipboard.copy(text)
+          puts ''
+          puts '📋 Copied to clipboard!'
+        rescue StandardError => e
+          puts ''
+          puts "⚠️ Could not copy to clipboard: #{e.message}"
+          puts '   (URLs shown above for manual copy)'
+        end
+      end
+    end
+  end
+end
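A hypothetical end-to-end use of the new class (brand, project, and file names invented for illustration); `parse_expiry` accepts `Nh` or `Nd` and caps links at 7 days / 168 hours:

```ruby
share = Appydave::Tools::Dam::ShareOperations.new('appydave', 'b65-intro')

result = share.generate_links(files: 'final-cut.mp4', expires: '24h')
result[:success]          # => true if the file exists in S3
result[:urls].first[:url] # pre-signed GET URL, viewable inline for 24 hours

share.generate_links(files: ['a.mp4', 'b.srt'], expires: '7d', download: true)
# multiple links, forced download via Content-Disposition: attachment

share.generate_links(files: 'a.mp4', expires: '8d')
# raises ArgumentError: Expiry cannot exceed 7 days
```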
data/lib/appydave/tools/dam/status.rb
CHANGED

@@ -35,7 +35,7 @@ module Appydave
 
        def resolve_project_path(project_id)
          # Resolve short name if needed (b65 -> b65-full-name)
-          resolved = ProjectResolver.
+          resolved = ProjectResolver.resolve(brand, project_id)
          File.join(brand_path, resolved)
        end
 
@@ -191,10 +191,14 @@ module Appydave
          puts "   SSD backup: #{ssd_count}"
 
          # Project types
-          storyline_count = manifest[:projects].count { |p| p[:
+          storyline_count = manifest[:projects].count { |p| p[:type] == 'storyline' }
+          flivideo_count = manifest[:projects].count { |p| p[:type] == 'flivideo' }
+          general_count = manifest[:projects].count { |p| p[:type] == 'general' }
+
          puts ''
-          puts "   Storyline
-          puts "   FliVideo
+          puts "   Storyline: #{storyline_count}"
+          puts "   FliVideo: #{flivideo_count}"
+          puts "   General: #{general_count}" if general_count.positive?
        end
 
        def sync_status_text(ahead, behind)
data/lib/appydave/tools.rb
CHANGED

@@ -56,6 +56,7 @@ require 'appydave/tools/dam/config'
 require 'appydave/tools/dam/project_resolver'
 require 'appydave/tools/dam/config_loader'
 require 'appydave/tools/dam/s3_operations'
+require 'appydave/tools/dam/share_operations'
 require 'appydave/tools/dam/project_listing'
 require 'appydave/tools/dam/manifest_generator'
 require 'appydave/tools/dam/sync_from_ssd'