appydave-tools 0.77.2 → 0.77.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +7 -0
- data/lib/appydave/tools/dam/s3_operations.rb +3 -154
- data/lib/appydave/tools/dam/s3_uploader.rb +171 -0
- data/lib/appydave/tools/version.rb +1 -1
- data/lib/appydave/tools.rb +1 -0
- data/package.json +1 -1
- metadata +2 -1
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: 6e33a370f45100cf866d707844fdcff92c44f2bb3bc5b5047e5438d2fdeec625
|
|
4
|
+
data.tar.gz: bca8378e46444366ffd120e8b48930b4d24d38ee45e23b214c0774cda2523f2e
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: 0ca158d9958463a5786a24380def618e9ad96d92e1a2114816c509b332f138a1f4cea014a738fd3078f03acfe4b5fa6e58212546a3891e222362f08eae290078
|
|
7
|
+
data.tar.gz: 16415e3c6d0889b881720b643058d486876a6f6f2668b46eb738785e9b81326f3b0e39d79f660e4c7e54ad90cc3c3204a6b10bdd570e200ca40e3a8b96341e54
|
data/CHANGELOG.md
CHANGED
|
@@ -1,3 +1,10 @@
|
|
|
1
|
+
## [0.77.2](https://github.com/appydave/appydave-tools/compare/v0.77.1...v0.77.2) (2026-03-20)
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
### Bug Fixes
|
|
5
|
+
|
|
6
|
+
* extract S3Base with shared infrastructure and helpers from S3Operations ([e2c3f66](https://github.com/appydave/appydave-tools/commit/e2c3f66c96599170d987c308dd90e3524e6be4b6))
|
|
7
|
+
|
|
1
8
|
## [0.77.1](https://github.com/appydave/appydave-tools/compare/v0.77.0...v0.77.1) (2026-03-20)
|
|
2
9
|
|
|
3
10
|
|
|
@@ -9,79 +9,7 @@ module Appydave
|
|
|
9
9
|
class S3Operations < S3Base
|
|
10
10
|
# Upload files from s3-staging/ to S3
|
|
11
11
|
def upload(dry_run: false)
|
|
12
|
-
|
|
13
|
-
staging_dir = File.join(project_dir, 's3-staging')
|
|
14
|
-
|
|
15
|
-
unless Dir.exist?(staging_dir)
|
|
16
|
-
puts "❌ No s3-staging directory found: #{staging_dir}"
|
|
17
|
-
puts 'Nothing to upload.'
|
|
18
|
-
return
|
|
19
|
-
end
|
|
20
|
-
|
|
21
|
-
files = Dir.glob("#{staging_dir}/**/*").select { |f| File.file?(f) }
|
|
22
|
-
|
|
23
|
-
if files.empty?
|
|
24
|
-
puts '❌ No files found in s3-staging/'
|
|
25
|
-
return
|
|
26
|
-
end
|
|
27
|
-
|
|
28
|
-
puts "📦 Uploading #{files.size} file(s) from #{project_id}/s3-staging/ to S3..."
|
|
29
|
-
puts ''
|
|
30
|
-
|
|
31
|
-
uploaded = 0
|
|
32
|
-
skipped = 0
|
|
33
|
-
failed = 0
|
|
34
|
-
|
|
35
|
-
# rubocop:disable Metrics/BlockLength
|
|
36
|
-
files.each do |file|
|
|
37
|
-
relative_path = file.sub("#{staging_dir}/", '')
|
|
38
|
-
|
|
39
|
-
# Skip excluded files (e.g., Windows Zone.Identifier, .DS_Store)
|
|
40
|
-
if excluded_path?(relative_path)
|
|
41
|
-
skipped += 1
|
|
42
|
-
next
|
|
43
|
-
end
|
|
44
|
-
|
|
45
|
-
s3_path = build_s3_key(relative_path)
|
|
46
|
-
|
|
47
|
-
# Check if file already exists in S3 and compare
|
|
48
|
-
s3_info = get_s3_file_info(s3_path)
|
|
49
|
-
|
|
50
|
-
if s3_info
|
|
51
|
-
s3_etag = s3_info['ETag'].gsub('"', '')
|
|
52
|
-
s3_size = s3_info['Size']
|
|
53
|
-
match_status = compare_files(local_file: file, s3_etag: s3_etag, s3_size: s3_size)
|
|
54
|
-
|
|
55
|
-
if match_status == :synced
|
|
56
|
-
comparison_method = multipart_etag?(s3_etag) ? 'size match' : 'unchanged'
|
|
57
|
-
puts " ⏭️ Skipped: #{relative_path} (#{comparison_method})"
|
|
58
|
-
skipped += 1
|
|
59
|
-
next
|
|
60
|
-
end
|
|
61
|
-
|
|
62
|
-
# File exists but content differs - warn before overwriting
|
|
63
|
-
puts " ⚠️ Warning: #{relative_path} exists in S3 with different content"
|
|
64
|
-
puts ' (multipart upload detected - comparing by size)' if multipart_etag?(s3_etag)
|
|
65
|
-
|
|
66
|
-
s3_time = s3_info['LastModified']
|
|
67
|
-
local_time = File.mtime(file)
|
|
68
|
-
puts " S3: #{s3_time.strftime('%Y-%m-%d %H:%M')} | Local: #{local_time.strftime('%Y-%m-%d %H:%M')}"
|
|
69
|
-
|
|
70
|
-
puts ' ⚠️ S3 file is NEWER than local - you may be overwriting recent changes!' if s3_time > local_time
|
|
71
|
-
puts ' Uploading will overwrite S3 version...'
|
|
72
|
-
end
|
|
73
|
-
|
|
74
|
-
if upload_file(file, s3_path, dry_run: dry_run)
|
|
75
|
-
uploaded += 1
|
|
76
|
-
else
|
|
77
|
-
failed += 1
|
|
78
|
-
end
|
|
79
|
-
end
|
|
80
|
-
# rubocop:enable Metrics/BlockLength
|
|
81
|
-
|
|
82
|
-
puts ''
|
|
83
|
-
puts '✅ Upload complete!'
|
|
84
|
-
puts " Uploaded: #{uploaded}, Skipped: #{skipped}, Failed: #{failed}"
|
|
12
|
+
S3Uploader.new(brand, project_id, **delegated_opts).upload(dry_run: dry_run)
|
|
85
13
|
end
|
|
86
14
|
|
|
87
15
|
# Download files from S3 to s3-staging/
|
|
@@ -486,87 +414,8 @@ module Appydave
|
|
|
486
414
|
|
|
487
415
|
private
|
|
488
416
|
|
|
489
|
-
|
|
490
|
-
|
|
491
|
-
if dry_run
|
|
492
|
-
puts " [DRY-RUN] Would upload: #{local_file} → s3://#{brand_info.aws.s3_bucket}/#{s3_path}"
|
|
493
|
-
return true
|
|
494
|
-
end
|
|
495
|
-
|
|
496
|
-
# Detect MIME type for proper browser handling
|
|
497
|
-
content_type = detect_content_type(local_file)
|
|
498
|
-
|
|
499
|
-
# For large files, use TransferManager for managed uploads (supports multipart)
|
|
500
|
-
file_size = File.size(local_file)
|
|
501
|
-
start_time = Time.now
|
|
502
|
-
|
|
503
|
-
if file_size > 100 * 1024 * 1024 # > 100MB
|
|
504
|
-
puts " 📤 Uploading large file (#{file_size_human(file_size)})..."
|
|
505
|
-
|
|
506
|
-
# Use TransferManager for multipart upload (modern AWS SDK approach)
|
|
507
|
-
transfer_manager = Aws::S3::TransferManager.new(client: s3_client)
|
|
508
|
-
transfer_manager.upload_file(
|
|
509
|
-
local_file,
|
|
510
|
-
bucket: brand_info.aws.s3_bucket,
|
|
511
|
-
key: s3_path,
|
|
512
|
-
content_type: content_type
|
|
513
|
-
)
|
|
514
|
-
else
|
|
515
|
-
# For smaller files, use direct put_object
|
|
516
|
-
File.open(local_file, 'rb') do |file|
|
|
517
|
-
s3_client.put_object(
|
|
518
|
-
bucket: brand_info.aws.s3_bucket,
|
|
519
|
-
key: s3_path,
|
|
520
|
-
body: file,
|
|
521
|
-
content_type: content_type
|
|
522
|
-
)
|
|
523
|
-
end
|
|
524
|
-
end
|
|
525
|
-
|
|
526
|
-
elapsed = Time.now - start_time
|
|
527
|
-
elapsed_str = format_duration(elapsed)
|
|
528
|
-
puts " ✓ Uploaded: #{File.basename(local_file)} (#{file_size_human(file_size)}) in #{elapsed_str}"
|
|
529
|
-
true
|
|
530
|
-
rescue Aws::S3::Errors::ServiceError => e
|
|
531
|
-
puts " ✗ Failed: #{File.basename(local_file)}"
|
|
532
|
-
puts " Error: #{e.message}"
|
|
533
|
-
false
|
|
534
|
-
rescue StandardError => e
|
|
535
|
-
puts " ✗ Failed: #{File.basename(local_file)}"
|
|
536
|
-
puts " Error: #{e.class} - #{e.message}"
|
|
537
|
-
false
|
|
538
|
-
end
|
|
539
|
-
|
|
540
|
-
def detect_content_type(filename)
|
|
541
|
-
ext = File.extname(filename).downcase
|
|
542
|
-
case ext
|
|
543
|
-
when '.mp4'
|
|
544
|
-
'video/mp4'
|
|
545
|
-
when '.mov'
|
|
546
|
-
'video/quicktime'
|
|
547
|
-
when '.avi'
|
|
548
|
-
'video/x-msvideo'
|
|
549
|
-
when '.mkv'
|
|
550
|
-
'video/x-matroska'
|
|
551
|
-
when '.webm'
|
|
552
|
-
'video/webm'
|
|
553
|
-
when '.m4v'
|
|
554
|
-
'video/x-m4v'
|
|
555
|
-
when '.jpg', '.jpeg'
|
|
556
|
-
'image/jpeg'
|
|
557
|
-
when '.png'
|
|
558
|
-
'image/png'
|
|
559
|
-
when '.gif'
|
|
560
|
-
'image/gif'
|
|
561
|
-
when '.pdf'
|
|
562
|
-
'application/pdf'
|
|
563
|
-
when '.json'
|
|
564
|
-
'application/json'
|
|
565
|
-
when '.srt', '.vtt', '.txt', '.md'
|
|
566
|
-
'text/plain'
|
|
567
|
-
else
|
|
568
|
-
'application/octet-stream'
|
|
569
|
-
end
|
|
417
|
+
def delegated_opts
|
|
418
|
+
{ brand_info: brand_info, brand_path: brand_path, s3_client: @s3_client_override }
|
|
570
419
|
end
|
|
571
420
|
|
|
572
421
|
# Download file from S3
|
|
@@ -0,0 +1,171 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module Appydave
|
|
4
|
+
module Tools
|
|
5
|
+
module Dam
|
|
6
|
+
# Handles S3 upload operations.
|
|
7
|
+
# Inherits shared infrastructure and helpers from S3Base.
|
|
8
|
+
class S3Uploader < S3Base
|
|
9
|
+
def upload(dry_run: false)
|
|
10
|
+
project_dir = project_directory_path
|
|
11
|
+
staging_dir = File.join(project_dir, 's3-staging')
|
|
12
|
+
|
|
13
|
+
unless Dir.exist?(staging_dir)
|
|
14
|
+
puts "❌ No s3-staging directory found: #{staging_dir}"
|
|
15
|
+
puts 'Nothing to upload.'
|
|
16
|
+
return
|
|
17
|
+
end
|
|
18
|
+
|
|
19
|
+
files = Dir.glob("#{staging_dir}/**/*").select { |f| File.file?(f) }
|
|
20
|
+
|
|
21
|
+
if files.empty?
|
|
22
|
+
puts '❌ No files found in s3-staging/'
|
|
23
|
+
return
|
|
24
|
+
end
|
|
25
|
+
|
|
26
|
+
puts "📦 Uploading #{files.size} file(s) from #{project_id}/s3-staging/ to S3..."
|
|
27
|
+
puts ''
|
|
28
|
+
|
|
29
|
+
uploaded = 0
|
|
30
|
+
skipped = 0
|
|
31
|
+
failed = 0
|
|
32
|
+
|
|
33
|
+
# rubocop:disable Metrics/BlockLength
|
|
34
|
+
files.each do |file|
|
|
35
|
+
relative_path = file.sub("#{staging_dir}/", '')
|
|
36
|
+
|
|
37
|
+
# Skip excluded files (e.g., Windows Zone.Identifier, .DS_Store)
|
|
38
|
+
if excluded_path?(relative_path)
|
|
39
|
+
skipped += 1
|
|
40
|
+
next
|
|
41
|
+
end
|
|
42
|
+
|
|
43
|
+
s3_path = build_s3_key(relative_path)
|
|
44
|
+
|
|
45
|
+
# Check if file already exists in S3 and compare
|
|
46
|
+
s3_info = get_s3_file_info(s3_path)
|
|
47
|
+
|
|
48
|
+
if s3_info
|
|
49
|
+
s3_etag = s3_info['ETag'].gsub('"', '')
|
|
50
|
+
s3_size = s3_info['Size']
|
|
51
|
+
match_status = compare_files(local_file: file, s3_etag: s3_etag, s3_size: s3_size)
|
|
52
|
+
|
|
53
|
+
if match_status == :synced
|
|
54
|
+
comparison_method = multipart_etag?(s3_etag) ? 'size match' : 'unchanged'
|
|
55
|
+
puts " ⏭️ Skipped: #{relative_path} (#{comparison_method})"
|
|
56
|
+
skipped += 1
|
|
57
|
+
next
|
|
58
|
+
end
|
|
59
|
+
|
|
60
|
+
# File exists but content differs - warn before overwriting
|
|
61
|
+
puts " ⚠️ Warning: #{relative_path} exists in S3 with different content"
|
|
62
|
+
puts ' (multipart upload detected - comparing by size)' if multipart_etag?(s3_etag)
|
|
63
|
+
|
|
64
|
+
s3_time = s3_info['LastModified']
|
|
65
|
+
local_time = File.mtime(file)
|
|
66
|
+
puts " S3: #{s3_time.strftime('%Y-%m-%d %H:%M')} | Local: #{local_time.strftime('%Y-%m-%d %H:%M')}"
|
|
67
|
+
|
|
68
|
+
puts ' ⚠️ S3 file is NEWER than local - you may be overwriting recent changes!' if s3_time > local_time
|
|
69
|
+
puts ' Uploading will overwrite S3 version...'
|
|
70
|
+
end
|
|
71
|
+
|
|
72
|
+
if upload_file(file, s3_path, dry_run: dry_run)
|
|
73
|
+
uploaded += 1
|
|
74
|
+
else
|
|
75
|
+
failed += 1
|
|
76
|
+
end
|
|
77
|
+
end
|
|
78
|
+
# rubocop:enable Metrics/BlockLength
|
|
79
|
+
|
|
80
|
+
puts ''
|
|
81
|
+
puts '✅ Upload complete!'
|
|
82
|
+
puts " Uploaded: #{uploaded}, Skipped: #{skipped}, Failed: #{failed}"
|
|
83
|
+
end
|
|
84
|
+
|
|
85
|
+
private
|
|
86
|
+
|
|
87
|
+
def upload_file(local_file, s3_path, dry_run: false)
|
|
88
|
+
if dry_run
|
|
89
|
+
puts " [DRY-RUN] Would upload: #{local_file} → s3://#{brand_info.aws.s3_bucket}/#{s3_path}"
|
|
90
|
+
return true
|
|
91
|
+
end
|
|
92
|
+
|
|
93
|
+
# Detect MIME type for proper browser handling
|
|
94
|
+
content_type = detect_content_type(local_file)
|
|
95
|
+
|
|
96
|
+
# For large files, use TransferManager for managed uploads (supports multipart)
|
|
97
|
+
file_size = File.size(local_file)
|
|
98
|
+
start_time = Time.now
|
|
99
|
+
|
|
100
|
+
if file_size > 100 * 1024 * 1024 # > 100MB
|
|
101
|
+
puts " 📤 Uploading large file (#{file_size_human(file_size)})..."
|
|
102
|
+
|
|
103
|
+
# Use TransferManager for multipart upload (modern AWS SDK approach)
|
|
104
|
+
transfer_manager = Aws::S3::TransferManager.new(client: s3_client)
|
|
105
|
+
transfer_manager.upload_file(
|
|
106
|
+
local_file,
|
|
107
|
+
bucket: brand_info.aws.s3_bucket,
|
|
108
|
+
key: s3_path,
|
|
109
|
+
content_type: content_type
|
|
110
|
+
)
|
|
111
|
+
else
|
|
112
|
+
# For smaller files, use direct put_object
|
|
113
|
+
File.open(local_file, 'rb') do |file|
|
|
114
|
+
s3_client.put_object(
|
|
115
|
+
bucket: brand_info.aws.s3_bucket,
|
|
116
|
+
key: s3_path,
|
|
117
|
+
body: file,
|
|
118
|
+
content_type: content_type
|
|
119
|
+
)
|
|
120
|
+
end
|
|
121
|
+
end
|
|
122
|
+
|
|
123
|
+
elapsed = Time.now - start_time
|
|
124
|
+
elapsed_str = format_duration(elapsed)
|
|
125
|
+
puts " ✓ Uploaded: #{File.basename(local_file)} (#{file_size_human(file_size)}) in #{elapsed_str}"
|
|
126
|
+
true
|
|
127
|
+
rescue Aws::S3::Errors::ServiceError => e
|
|
128
|
+
puts " ✗ Failed: #{File.basename(local_file)}"
|
|
129
|
+
puts " Error: #{e.message}"
|
|
130
|
+
false
|
|
131
|
+
rescue StandardError => e
|
|
132
|
+
puts " ✗ Failed: #{File.basename(local_file)}"
|
|
133
|
+
puts " Error: #{e.class} - #{e.message}"
|
|
134
|
+
false
|
|
135
|
+
end
|
|
136
|
+
|
|
137
|
+
def detect_content_type(filename)
|
|
138
|
+
ext = File.extname(filename).downcase
|
|
139
|
+
case ext
|
|
140
|
+
when '.mp4'
|
|
141
|
+
'video/mp4'
|
|
142
|
+
when '.mov'
|
|
143
|
+
'video/quicktime'
|
|
144
|
+
when '.avi'
|
|
145
|
+
'video/x-msvideo'
|
|
146
|
+
when '.mkv'
|
|
147
|
+
'video/x-matroska'
|
|
148
|
+
when '.webm'
|
|
149
|
+
'video/webm'
|
|
150
|
+
when '.m4v'
|
|
151
|
+
'video/x-m4v'
|
|
152
|
+
when '.jpg', '.jpeg'
|
|
153
|
+
'image/jpeg'
|
|
154
|
+
when '.png'
|
|
155
|
+
'image/png'
|
|
156
|
+
when '.gif'
|
|
157
|
+
'image/gif'
|
|
158
|
+
when '.pdf'
|
|
159
|
+
'application/pdf'
|
|
160
|
+
when '.json'
|
|
161
|
+
'application/json'
|
|
162
|
+
when '.srt', '.vtt', '.txt', '.md'
|
|
163
|
+
'text/plain'
|
|
164
|
+
else
|
|
165
|
+
'application/octet-stream'
|
|
166
|
+
end
|
|
167
|
+
end
|
|
168
|
+
end
|
|
169
|
+
end
|
|
170
|
+
end
|
|
171
|
+
end
|
data/lib/appydave/tools.rb
CHANGED
|
@@ -68,6 +68,7 @@ require 'appydave/tools/dam/config'
|
|
|
68
68
|
require 'appydave/tools/dam/project_resolver'
|
|
69
69
|
require 'appydave/tools/dam/config_loader'
|
|
70
70
|
require 'appydave/tools/dam/s3_base'
|
|
71
|
+
require 'appydave/tools/dam/s3_uploader'
|
|
71
72
|
require 'appydave/tools/dam/s3_operations'
|
|
72
73
|
require 'appydave/tools/dam/s3_scanner'
|
|
73
74
|
require 'appydave/tools/dam/share_operations'
|
data/package.json
CHANGED
metadata
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: appydave-tools
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 0.77.2
|
|
4
|
+
version: 0.77.3
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- David Cruwys
|
|
@@ -378,6 +378,7 @@ files:
|
|
|
378
378
|
- lib/appydave/tools/dam/s3_operations.rb
|
|
379
379
|
- lib/appydave/tools/dam/s3_scan_command.rb
|
|
380
380
|
- lib/appydave/tools/dam/s3_scanner.rb
|
|
381
|
+
- lib/appydave/tools/dam/s3_uploader.rb
|
|
381
382
|
- lib/appydave/tools/dam/share_operations.rb
|
|
382
383
|
- lib/appydave/tools/dam/ssd_status.rb
|
|
383
384
|
- lib/appydave/tools/dam/status.rb
|