s3_backup 0.0.5 → 0.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: c8df1802a52812399f8b1f91abebe675329f5dd0
- data.tar.gz: 1d3f6f9c0407707e6906ce249e59f091c0c6aafd
+ metadata.gz: f8d4e48f3d5a0125d3d100f14b18834b66560986
+ data.tar.gz: d01288f1afffeccebd2256c971366be957e45400
  SHA512:
- metadata.gz: d931c51fca7861bb7f5fac42563fcb6b1da05299ac56c09d790bedfc2d23fcb958d3da9f646cb0996b04a7bde31ab6085fd00766776b7649158808e7ab4d7af7
- data.tar.gz: 35fa04c64c85e1374d62e3bf89abc8834c05897e86e440eb439fe744c429a95ad611b6067d4415f7cae3a7046f804a19ea6f3c3d38f45de06658fe535fa1d3cb
+ metadata.gz: 1f361b09a6dd62d72f63dda4ed20c99817ca856656f222ca40a388a530b86722550d835dfed540d0b3798219ca8aebeef7a2ef9aeac2d192723e83ff02d2e16f
+ data.tar.gz: b4ed851c477696e54ed578a2a43fe091091e9b9b6715407564b6f1c42cc3827eb585e17733015535e273b3433aaea6a5950bbacf16ba054255f0ac0a5a7870bd
data/README.md CHANGED
@@ -42,6 +42,9 @@ s3:
  aws_secret_access_key: <%= ENV['AWS_SECRET_ACCESS_KEY'] %>
  bucket: <%= ENV['S3_BUCKET'] %>
  aws_region: <%= ENV['AWS_REGION'] %>
+ aws_endpoint: <%= ENV['AWS_ENDPOINT'] %>
+ server_side_encryption: 'AES256'
+ stub_responses: false
  pg_path: rds_backup
  redis_path: redis_backup
  keep: 5
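Taken together with the unchanged lines above this hunk, the s3: section documented in the README now looks roughly like the sketch below. The aws_access_key_id line is not shown in the hunk and is assumed to follow the same ENV pattern; all values are placeholders.

s3:
  aws_access_key_id: <%= ENV['AWS_ACCESS_KEY_ID'] %>
  aws_secret_access_key: <%= ENV['AWS_SECRET_ACCESS_KEY'] %>
  bucket: <%= ENV['S3_BUCKET'] %>
  aws_region: <%= ENV['AWS_REGION'] %>
  aws_endpoint: <%= ENV['AWS_ENDPOINT'] %>
  server_side_encryption: 'AES256'  # only passed to S3 when set
  stub_responses: false             # true stubs the AWS client (used by the specs)
  pg_path: rds_backup
  redis_path: redis_backup
  keep: 5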
data/lib/s3_backup/config.rb CHANGED
@@ -12,8 +12,11 @@ module S3Backup
  attr_accessor :redis_dump_path
  attr_accessor :aws_access_key_id
  attr_accessor :aws_secret_access_key
+ attr_accessor :aws_endpoint
+ attr_accessor :aws_server_side_encryption
  attr_accessor :bucket
  attr_accessor :aws_region
+ attr_accessor :aws_stub_responses
  attr_accessor :s3_pg_path
  attr_accessor :s3_redis_path
  attr_accessor :s3_keep
@@ -29,13 +32,16 @@ module S3Backup

  self.redis_dump_path = config('redis', 'dump_path')

- self.aws_access_key_id = config('s3', 'aws_access_key_id')
- self.aws_secret_access_key = config('s3', 'aws_secret_access_key')
- self.bucket = config('s3', 'bucket')
- self.aws_region = config('s3', 'aws_region')
- self.s3_keep = config('s3', 'keep')
- self.s3_pg_path = config('s3', 'pg_path')
- self.s3_redis_path = config('s3', 'redis_path')
+ self.aws_access_key_id = config('s3', 'aws_access_key_id')
+ self.aws_secret_access_key = config('s3', 'aws_secret_access_key')
+ self.bucket = config('s3', 'bucket')
+ self.aws_region = config('s3', 'aws_region')
+ self.aws_endpoint = config('s3', 'aws_endpoint')
+ self.aws_server_side_encryption = config('s3', 'server_side_encryption')
+ self.aws_stub_responses = config('s3', 'stub_responses')
+ self.s3_keep = config('s3', 'keep')
+ self.s3_pg_path = config('s3', 'pg_path')
+ self.s3_redis_path = config('s3', 'redis_path')

  self.tables = config('tables') || {}

data/lib/s3_backup/pg/backup.rb CHANGED
@@ -17,11 +17,11 @@ module S3Backup
  Obfuscate.new(pg_dump_file.path, obfuscated_file.path).obfuscate_dump!
  puts 'Obfuscation done.'
  puts 'Upload to S3 ...'
- S3Backup::S3.new.upload!(obfucated_file_name, Config.s3_pg_path, obfuscated_file.path)
+ S3Backup::Storage::S3.new.upload!(obfucated_file_name, Config.s3_pg_path, obfuscated_file.path)
  puts 'Uploaded.'
  puts 'Clean environement.'
  clean_env
- S3Backup::S3.new.clean!(db_name, Config.s3_pg_path)
+ S3Backup::Storage::S3.new.clean!(db_name, Config.s3_pg_path)
  end

  private
data/lib/s3_backup/pg/import.rb CHANGED
@@ -16,7 +16,7 @@ module S3Backup
  puts 'Setup local database ...'
  setup_local_database
  puts 'Downloading pg database ...'
- S3Backup::S3.new.download!(pg_database_name, Config.s3_pg_path, pg_dump_s3_file.path)
+ S3Backup::Storage::S3.new.download!(pg_database_name, Config.s3_pg_path, pg_dump_s3_file.path)
  umcompress_file
  puts "Loading data in #{database} ..."
  load_file
data/lib/s3_backup/redis/backup.rb CHANGED
@@ -9,11 +9,11 @@ module S3Backup
  compress_file
  puts 'Compressed.'
  puts 'Upload to S3 ...'
- S3Backup::S3.new.upload!(compressed_file_name, Config.s3_redis_path, compressed_file.path)
+ S3Backup::Storage::S3.new.upload!(compressed_file_name, Config.s3_redis_path, compressed_file.path)
  puts 'Uploaded.'
  puts 'Clean environement.'
  clean_env
- S3Backup::S3.new.clean!(base_s3_name, Config.s3_redis_path)
+ S3Backup::Storage::S3.new.clean!(base_s3_name, Config.s3_redis_path)
  end

  private
data/lib/s3_backup/redis/import.rb CHANGED
@@ -18,7 +18,7 @@ module S3Backup
  puts 'Stop redis database ...'
  stop_redis_database
  puts 'Downloading redis database ...'
- S3Backup::S3.new.download!(redis_s3_file_name, Config.s3_redis_path, redis_dump_s3_file.path)
+ S3Backup::Storage::S3.new.download!(redis_s3_file_name, Config.s3_redis_path, redis_dump_s3_file.path)
  umcompress_file
  copy_file
  puts 'Start redis database ...'
data/lib/s3_backup/storage/s3.rb ADDED
@@ -0,0 +1,92 @@
+ require 'aws-sdk-s3'
+ require 'zlib'
+ require 'ruby-progressbar'
+
+ module S3Backup
+ module Storage
+ class S3
+
+ attr_reader :connection
+
+ def initialize
+ @connection = Aws::S3::Client.new(
+ credentials: Aws::Credentials.new(
+ Config.aws_access_key_id,
+ Config.aws_secret_access_key
+ ),
+ region: Config.aws_region,
+ endpoint: Config.aws_endpoint,
+ stub_responses: Config.aws_stub_responses
+ )
+ end
+
+ def upload!(file_name, bucket_path, file_path)
+ upload_options = {
+ bucket: Config.bucket,
+ key: File.join(bucket_path, file_name),
+ body: File.open(file_path)
+ }
+
+ upload_options[:server_side_encryption] = Config.aws_server_side_encryption if Config.aws_server_side_encryption
+ @connection.put_object(upload_options)
+
+ true
+ end
+
+ def download!(database_name, bucket_path, file_path)
+ prefix = File.join(bucket_path, database_name)
+ s3_backup_file = @connection.list_objects(bucket: Config.bucket, prefix: prefix).contents.sort_by(&:last_modified).reverse.first
+
+ raise "#{database_name} file not found on s3" unless s3_backup_file
+
+ file = File.open(file_path, 'wb')
+ puts "File size: #{(s3_backup_file.size.to_f / 1024 / 1024).round(4)}MB, writing to #{file_path}"
+ total_bytes = s3_backup_file.size
+ remaining_bytes = s3_backup_file.size
+ progress_bar
+
+ @connection.get_object(bucket: Config.bucket, key: s3_backup_file.key) do |chunk|
+ update_progress_bar(total_bytes, remaining_bytes)
+ file.write chunk
+ remaining_bytes -= chunk.size
+ end
+ file.close
+
+ true
+ end
+
+ def clean!(base_name, bucket_path)
+ prefix = File.join(bucket_path, base_name)
+
+ s3_files = @connection.list_objects(bucket: Config.bucket, prefix: prefix).contents.sort_by(&:last_modified).reverse
+ files_to_remove = s3_files[(Config.s3_keep || 1)..-1]
+
+ return true if files_to_remove.nil? || files_to_remove.empty?
+
+ @connection.delete_objects(
+ bucket: Config.bucket,
+ delete: {
+ objects: files_to_remove.map {|f| {key: f.key} }
+ }
+ )
+
+ true
+ end
+
+ private
+
+ def progress_bar
+ @progress_bar ||= ProgressBar.create(
+ format: "%a %b\u{15E7}%i %p%% %t",
+ progress_mark: ' ',
+ remainder_mark: "\u{FF65}"
+ )
+ end
+
+ def update_progress_bar(total, remaining)
+ progress_bar.progress = (((total - remaining) * 100) / total).to_i
+ end
+
+ end
+ end
+ end
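The callers shown earlier (pg and redis backup/import) only swap S3Backup::S3 for S3Backup::Storage::S3; the upload!/download!/clean! interface is unchanged. A minimal usage sketch based on the calls visible in this diff — the config path, file names and prefixes below are illustrative, not taken from the gem:

require 's3_backup'

S3Backup::Config.load!('path/to/s3_backup.yml')  # as the new spec does with its fixture
storage = S3Backup::Storage::S3.new

# put_object under <bucket>/<pg_path>/<file_name>
storage.upload!('mydb.dump.gz', S3Backup::Config.s3_pg_path, '/tmp/mydb.dump.gz')

# fetch the most recently modified object whose key starts with <pg_path>/mydb
storage.download!('mydb', S3Backup::Config.s3_pg_path, '/tmp/latest.dump.gz')

# keep only the `keep` most recent objects under that prefix, delete the rest
storage.clean!('mydb', S3Backup::Config.s3_pg_path)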
data/lib/s3_backup/version.rb CHANGED
@@ -1,3 +1,3 @@
  module S3Backup # :nodoc:
- VERSION = '0.0.5'.freeze
+ VERSION = '0.0.6'.freeze
  end
data/lib/s3_backup.rb CHANGED
@@ -1,5 +1,5 @@
  require 's3_backup/config'
- require 's3_backup/s3'
+ require 's3_backup/storage/s3'

  require 's3_backup/pg/obfuscate'
  require 's3_backup/pg/backup'
@@ -36,7 +36,7 @@ module S3Backup

  require_s3_params
  Config.requires!(:s3_pg_path)
- file_downloaded = S3Backup::S3.new.download!(pg_database_name, Config.s3_pg_path, filename)
+ file_downloaded = S3Backup::Storage::S3.new.download!(pg_database_name, Config.s3_pg_path, filename)
  abort 'Unable to download file' unless file_downloaded
  filename
  end
data/spec/fixtures/obfuscate_default.yml CHANGED
@@ -9,7 +9,10 @@ s3:
  aws_access_key_id: XXX
  aws_secret_access_key: DSLK
  bucket: my_bucket
- aws_region: p-southeast-2
+ aws_region: ap-southeast-2
+ aws_endpoint: https://s3.amazonaws.com
+ stub_responses: true
+ server_side_encryption: AES256
  pg_path: pg_backup

  tables:
data/spec/s3_backup/storage/s3_spec.rb ADDED
@@ -0,0 +1,66 @@
+ require 'spec_helper'
+
+ describe S3Backup::Storage::S3 do
+ let(:configuration_file) { 'spec/fixtures/obfuscate_default.yml' }
+
+ before { S3Backup::Config.load!(configuration_file) }
+ subject(:s3) { S3Backup::Storage::S3.new }
+
+ describe '#upload' do
+ before do
+ client = s3.instance_variable_get('@connection')
+ client.stub_responses(:put_object)
+ end
+
+ it 'should upload a file via AWS S3' do
+ s3.upload!('test', 'test-bucket', 'spec/fixtures/obfuscate_default.yml')
+ end
+ end
+
+ describe '#download' do
+ before do
+ client = s3.instance_variable_get('@connection')
+ client.stub_responses(:list_objects,
+ {
+ contents: [
+ {
+ key: 'offers/1/img.jgp',
+ last_modified: Time.now,
+ size: 1234
+ }
+ ]
+ })
+ end
+
+ it 'should retrieve the latest file' do
+ temp = Tempfile.new('s3-backup')
+ expect(s3.download!('test', 'test-bucket', temp.path)).to eq(true)
+ end
+ end
+
+ describe '#clean!' do
+ before do
+ client = s3.instance_variable_get('@connection')
+ client.stub_responses(:list_objects,
+ {
+ contents: [
+ {
+ key: 'test1-012.tgz',
+ last_modified: Time.now,
+ size: 1234
+ },
+ {
+ key: 'test1-123.tgz',
+ last_modified: Time.now - 3600,
+ size: 1234
+ }
+ ]
+ })
+ end
+
+ it 'should clean the old files' do
+ s3.clean!('test', 'test-bucket')
+ end
+ end
+
+ end
metadata CHANGED
@@ -1,29 +1,29 @@
  --- !ruby/object:Gem::Specification
  name: s3_backup
  version: !ruby/object:Gem::Version
- version: 0.0.5
+ version: 0.0.6
  platform: ruby
  authors:
  - Tom Floc'h
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2017-11-16 00:00:00.000000000 Z
+ date: 2018-02-23 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
- name: fog
+ name: aws-sdk-s3
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: '1.41'
+ version: '1.8'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: '1.41'
+ version: '1.8'
  - !ruby/object:Gem::Dependency
  name: faker
  requirement: !ruby/object:Gem::Requirement
@@ -113,12 +113,13 @@ files:
  - lib/s3_backup/railtie.rb
  - lib/s3_backup/redis/backup.rb
  - lib/s3_backup/redis/import.rb
- - lib/s3_backup/s3.rb
+ - lib/s3_backup/storage/s3.rb
  - lib/s3_backup/version.rb
  - lib/tasks/pg.rake
  - lib/tasks/redis.rake
  - spec/fixtures/obfuscate_default.yml
  - spec/s3_backup/pg/obfuscate_spec.rb
+ - spec/s3_backup/storage/s3_spec.rb
  - spec/spec_helper.rb
  homepage: https://github.com/arkes/s3_backup
  licenses:
@@ -140,7 +141,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.4.5
+ rubygems_version: 2.4.5.2
  signing_key:
  specification_version: 4
  summary: Postgres, redis backup and restore
data/lib/s3_backup/s3.rb DELETED
@@ -1,81 +0,0 @@
- require 'fog'
- require 'zlib'
- require 'ruby-progressbar'
-
- module S3Backup
- class S3
-
- attr_reader :connection
-
- def initialize
- @connection = Fog::Storage.new(
- provider: 'AWS',
- aws_access_key_id: Config.aws_access_key_id,
- aws_secret_access_key: Config.aws_secret_access_key,
- region: Config.aws_region
- )
- end
-
- def upload!(file_name, bucket_path, file_path)
- directory = @connection.directories.get(Config.bucket)
-
- directory.files.create(
- key: File.join(bucket_path, file_name),
- body: File.open(file_path),
- public: false
- )
-
- true
- end
-
- def download!(database_name, bucket_path, file_path)
- prefix = File.join(bucket_path, database_name)
- directory = connection.directories.get(Config.bucket, prefix: prefix)
-
- s3_backup_file = directory.files.sort_by(&:last_modified).reverse.first
-
- raise "#{database_name} file not found on s3" unless s3_backup_file
-
- file = File.open(file_path, 'wb')
- puts "File size: #{s3_backup_file.content_length / 1024 / 1024}MB, writing to #{file_path}"
- progress_bar
-
- directory.files.get(s3_backup_file.key) do |chunk, remaining_bytes, total_bytes|
- update_progress_bar(total_bytes, remaining_bytes)
- file.write chunk
- end
- file.close
-
- true
- end
-
- def clean!(base_name, bucket_path)
- prefix = File.join(bucket_path, base_name)
- directory = connection.directories.get(Config.bucket, prefix: prefix)
-
- s3_files = directory.files.sort_by(&:last_modified).reverse
- files_to_remove = s3_files[Config.s3_keep..-1]
-
- return true if files_to_remove.blank?
-
- files_to_remove.each(&:destroy)
-
- true
- end
-
- private
-
- def progress_bar
- @progress_bar ||= ProgressBar.create(
- format: "%a %b\u{15E7}%i %p%% %t",
- progress_mark: ' ',
- remainder_mark: "\u{FF65}"
- )
- end
-
- def update_progress_bar(total, remaining)
- progress_bar.progress = (((total - remaining) * 100) / total).to_i
- end
-
- end
- end
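For readers comparing the two implementations, the core of the change is the move from Fog's directory/file model to direct aws-sdk-s3 client calls. Condensed from the deleted and added files above (upload path only; the new code adds :server_side_encryption to the options hash only when it is configured):

# 0.0.5 (fog)
@connection.directories.get(Config.bucket).files.create(
  key:    File.join(bucket_path, file_name),
  body:   File.open(file_path),
  public: false
)

# 0.0.6 (aws-sdk-s3)
@connection.put_object(
  bucket: Config.bucket,
  key:    File.join(bucket_path, file_name),
  body:   File.open(file_path)
)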