s3ckup 0.0.1

checksums.yaml.gz ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: ab5c9438cd5dcd2044b8de936e70cd11c676d400
+   data.tar.gz: dd1d6f66846bcb0891bda487ea9645792e121b2b
+ SHA512:
+   metadata.gz: 5159afdaace5641384d99967281fa705f551da0622b16f79e5b1388d9fe3bc0412c52806e217f63aa801a3dfa520d07da920c85d9dfc7d7039c05da2ebf4d747
+   data.tar.gz: 302145498e57f9c2c06417dae0cd91e4d59d90bd528c45cd7b85bc0ece2e8c994a8d16ce8ec0d2032e16cc5e5c14acaacef85daf67fb134a3abb99002d3df53c
MIT-LICENSE ADDED
@@ -0,0 +1,20 @@
+ Copyright 2012 Tiago Scolari
+ 
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+ 
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+ 
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
README.md ADDED
@@ -0,0 +1,43 @@
+ s3ckup
+ ====
+ 
+ Ruby gem / command-line tool for incremental backups to AWS S3.
+ 
+ My motivation was to use it in a backup container for Docker, but it's a
+ command-line tool that can be used on any platform.
+ 
+ It stores a `manifest.yml` file in the root of the bucket, containing all
+ file names and digests. On each backup this file is compared with a freshly
+ generated one: only new or updated files are pushed to the bucket, and files
+ that no longer exist locally are deleted from it.
+ 
+ There's no versioning, but you can use S3's built-in versioning.
+ 
+ INSTALLATION
+ ------------
+ 
+ ```
+ gem install s3ckup
+ ```
+ 
+ 
+ USAGE
+ -----
+ 
+ ```
+ Usage: s3ckup OPTIONS --folders folder1,folder2,...
+     -k, --aws-key-id KEY_ID          AWS key id
+     -a, --aws-access-key ACCESS_KEY  AWS access key
+     -b, --s3-bucket BUCKET_NAME      S3 bucket name
+     -r, --recursive                  Recursive mode
+     -v, --verbose                    Verbose
+     -f FOLDER1,FOLDER2,FOLDER3...,   Folder list, separated by ','
+         --folders
+ ```
+ 
+ Example:
+ 
+ ```
+ s3ckup -k $AWS_KEY -a $AWS_SECRET -b my_backup_bucket -r -f /myapp/uploads,/mydb/data
+ ```
+ 
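A rough sketch of how the manifest described in the README is built — each backed-up file path maps to its MD5 digest, mirroring `lib/s3ckup/manifest.rb` (the folder path and the output shown are made-up examples):

```
require 'digest/md5'
require 'yaml'

# Glob a folder the same way the gem does and skip directories;
# the real Manifest also skips symlinks and sockets.
files = Dir['/myapp/uploads/*'].reject { |path| File.directory?(path) }

manifest = files.each_with_object({}) do |path, digests|
  digests[path] = Digest::MD5.file(path).hexdigest
end

puts manifest.to_yaml
# ---
# /myapp/uploads/avatar.png: 0cc175b9c0f1b6a831c399e269772661
```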
Rakefile ADDED
@@ -0,0 +1,9 @@
+ #!/usr/bin/env rake
+ begin
+   require 'bundler/setup'
+ rescue LoadError
+   puts 'You must `gem install bundler` and `bundle install` to run rake tasks'
+ end
+ 
+ Bundler::GemHelper.install_tasks
+ 
bin/s3ckup ADDED
@@ -0,0 +1,5 @@
+ #!/usr/bin/env ruby
+ 
+ require 's3ckup'
+ require 's3ckup/command_line'
+ S3ckup::CommandLine.new.run(ARGV)
lib/s3ckup.rb ADDED
@@ -0,0 +1,4 @@
+ require 'rubygems'
+ require_relative 's3ckup/manifest'
+ require_relative 's3ckup/backup'
+ require_relative 's3ckup/s3_manager'
lib/s3ckup/backup.rb ADDED
@@ -0,0 +1,39 @@
+ require 'logger'
+ 
+ module S3ckup
+   class Backup
+ 
+     def initialize(key_id, access_key, logger = Logger.new($stdout))
+       @logger = logger
+       @s3_manager = S3Manager.new(key_id, access_key, @logger)
+     end
+ 
+     def run(bucket_name, folders)
+       local_files = Manifest.new(folders.flatten)
+       remote_files = @s3_manager.fetch_manifest(bucket_name)
+       backup(bucket_name, remote_files, local_files)
+     end
+ 
+     private
+ 
+     def fetch_remote_manifest(bucket)
+       manifest_yml = @s3_manager.fetch_manifest(bucket)
+       data = YAML.load(manifest_yml.value)
+       Manifest.new(data)
+     rescue AWS::S3::NoSuchKey
+       Manifest.new({})
+     end
+ 
+     def backup(bucket, remote_files, local_files)
+       removed_files = remote_files - local_files
+       @s3_manager.delete(bucket, removed_files)
+ 
+       remote_files = remote_files - removed_files
+       files_to_upload = local_files.diff(remote_files)
+ 
+       @s3_manager.update_manifest(bucket, local_files)
+       @s3_manager.upload(bucket, files_to_upload)
+     end
+ 
+   end
+ end
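For reference, `bin/s3ckup` drives `Backup` through `CommandLine`. A minimal sketch of calling it directly from Ruby, assuming hypothetical credentials in the environment and a hypothetical bucket and folder; note that `Backup#run` expects glob patterns, which the CLI normally builds from `--folders`:

```
require 's3ckup'
require 'logger'

backup = S3ckup::Backup.new(ENV['AWS_KEY'], ENV['AWS_SECRET'], Logger.new($stdout))

# Roughly equivalent to: s3ckup -k $AWS_KEY -a $AWS_SECRET -b my_backup_bucket -r -f /myapp/uploads
backup.run('my_backup_bucket', ['/myapp/uploads/**/*'])
```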
lib/s3ckup/command_line.rb ADDED
@@ -0,0 +1,58 @@
+ require 'optparse'
+ require 'logger'
+ 
+ module S3ckup
+   class CommandLine
+ 
+     def run(args)
+       logger = Logger.new($stdout)
+       logger.level = Logger::WARN
+ 
+       options = fetch_options(args, logger)
+       S3ckup::Backup.new(options[:key_id], options[:access_key], logger).
+         run(options[:bucket], options[:folders])
+     end
+ 
+     private
+ 
+     def fetch_options(args, logger)
+       Hash.new.tap do |options|
+         OptionParser.new do |opts|
+           opts.banner = "Usage: s3ckup OPTIONS --folders folder1,folder2,..."
+ 
+           opts.on("-k", "--aws-key-id KEY_ID", "AWS key id") do |key_id|
+             options[:key_id] = key_id
+           end
+ 
+           opts.on("-a", "--aws-access-key ACCESS_KEY", "AWS access key") do |access_key|
+             options[:access_key] = access_key
+           end
+ 
+           opts.on("-b", "--s3-bucket BUCKET_NAME", "S3 bucket name") do |bucket_name|
+             options[:bucket] = bucket_name
+           end
+ 
+           opts.on("-r", "--recursive", "Recursive mode") do
+             options[:recursive] = true
+           end
+ 
+           opts.on("-v", "--verbose", "Verbose") do
+             logger.level = Logger::INFO
+           end
+ 
+           opts.on("-f", "--folders FOLDER1,FOLDER2,FOLDER3...", Array, "Folder list, separated by ','") do |folder_list|
+             options[:folders] = folder_list
+           end
+ 
+         end.parse!(args)
+ 
+         # Build glob patterns after parsing, so -r takes effect regardless of argument order.
+         if options[:folders]
+           pattern = options[:recursive] ? File.join("**", "*") : "*"
+           options[:folders] = options[:folders].map { |folder| File.join(folder, pattern) }
+         end
+       end
+     end
+ 
+   end
+ end
lib/s3ckup/manifest.rb ADDED
@@ -0,0 +1,67 @@
+ require 'digest/md5'
+ require 'yaml'
+ 
+ module S3ckup
+   class Manifest
+     attr_reader :files
+ 
+     def initialize(*args)
+       if args.first.is_a?(Hash)
+         @files = args.first
+       else
+         @files = create_manifest(Array(args).flatten)
+       end
+     end
+ 
+     def to_yaml
+       @files.to_yaml
+     end
+ 
+     def file_list
+       @files.keys.sort
+     end
+ 
+     def -(manifest)
+       raise ArgumentError unless manifest.is_a?(Manifest)
+       remaining_files = file_list - manifest.file_list
+       remaining_files = @files.clone.keep_if { |file| remaining_files.include?(file) }
+       Manifest.new(remaining_files)
+     end
+ 
+     def diff(manifest)
+       files_to_upload = @files.to_a - manifest.files.to_a
+       Manifest.new(Hash[*files_to_upload.flatten])
+     end
+ 
+     private
+ 
+     def create_manifest(folders)
+       Hash.new.tap do |files|
+         folders.each do |folder|
+           files.merge! scan_folder(folder)
+         end
+       end
+     end
+ 
+     def scan_folder(folder)
+       Hash.new.tap do |folder_files|
+         Dir[folder].each do |file_name|
+           if should_backup?(file_name)
+             folder_files[file_name] = file_digest(file_name)
+           end
+         end
+       end
+     end
+ 
+     def should_backup?(file_name)
+       ! (File.directory?(file_name) ||
+          File.symlink?(file_name) ||
+          File.socket?(file_name))
+     end
+ 
+     def file_digest(file_name)
+       Digest::MD5.file(file_name).hexdigest
+     end
+ 
+   end
+ end
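A small sketch of how the two set-like operations above are used during a backup: `-` selects files to delete, `diff` selects files to upload (file names and digests are hypothetical):

```
require 's3ckup'

local  = S3ckup::Manifest.new({ 'old.txt' => 'aaa', 'new.txt' => 'bbb' })   # freshly scanned
remote = S3ckup::Manifest.new({ 'old.txt' => 'zzz', 'gone.txt' => 'ccc' })  # from manifest.yml

(remote - local).file_list    # => ["gone.txt"]            deleted from the bucket
local.diff(remote).file_list  # => ["new.txt", "old.txt"]  new or changed, re-uploaded
```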
lib/s3ckup/s3_manager.rb ADDED
@@ -0,0 +1,50 @@
+ require 'aws/s3'
+ require 'logger'
+ 
+ module S3ckup
+   class S3Manager
+     MANIFEST_FILE_NAME = 'manifest.yml'
+ 
+     def initialize(key_id, access_key, logger = Logger.new($stdout))
+       connect!(key_id, access_key)
+       @logger = logger
+     end
+ 
+     def fetch_manifest(bucket)
+       manifest = AWS::S3::S3Object.find MANIFEST_FILE_NAME, bucket
+       Manifest.new(YAML.load(manifest.value))
+     rescue
+       @logger.warn("Failed to fetch remote manifest. Ignoring.")
+       Manifest.new({})
+     end
+ 
+     def upload(bucket, manifest)
+       manifest.file_list.each do |file_name|
+         @logger.info("Uploading file: #{file_name}")
+         AWS::S3::S3Object.store(file_name, open(file_name), bucket)
+       end
+     end
+ 
+     def update_manifest(bucket, manifest)
+       @logger.info("Updating remote manifest")
+       AWS::S3::S3Object.store(MANIFEST_FILE_NAME, manifest.to_yaml, bucket)
+     end
+ 
+     def delete(bucket, manifest)
+       manifest.file_list.each do |file_name|
+         @logger.info("Deleting remote file: #{file_name}")
+         AWS::S3::S3Object.delete file_name, bucket
+       end
+     end
+ 
+     private
+ 
+     def connect!(key_id, access_key)
+       AWS::S3::Base.establish_connection!(
+         :access_key_id => key_id,
+         :secret_access_key => access_key
+       )
+     end
+ 
+   end
+ end
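`S3Manager` is a thin wrapper around the `aws-s3` gem (`AWS::S3::S3Object.find`, `.store`, `.delete`). A minimal sketch of using it on its own, again with a hypothetical bucket and folder:

```
require 's3ckup'
require 'logger'

manager = S3ckup::S3Manager.new(ENV['AWS_KEY'], ENV['AWS_SECRET'], Logger.new($stdout))

remote = manager.fetch_manifest('my_backup_bucket')   # empty Manifest if none exists yet
local  = S3ckup::Manifest.new(['/myapp/uploads/*'])

manager.upload('my_backup_bucket', local.diff(remote))
manager.update_manifest('my_backup_bucket', local)
```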
lib/s3ckup/version.rb ADDED
@@ -0,0 +1,3 @@
+ module S3ckup
+   VERSION = "0.0.1"
+ end
metadata ADDED
@@ -0,0 +1,110 @@
+ --- !ruby/object:Gem::Specification
+ name: s3ckup
+ version: !ruby/object:Gem::Version
+   version: 0.0.1
+ platform: ruby
+ authors:
+ - Tiago Scolari
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2014-04-10 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: aws-s3
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: pry
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: rspec
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: pry-nav
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ description: Manages incremental backups of files, storing them on S3.
+ email:
+ - tscolari@gmail.com
+ executables:
+ - s3ckup
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - MIT-LICENSE
+ - README.md
+ - Rakefile
+ - bin/s3ckup
+ - lib/s3ckup.rb
+ - lib/s3ckup/backup.rb
+ - lib/s3ckup/command_line.rb
+ - lib/s3ckup/manifest.rb
+ - lib/s3ckup/s3_manager.rb
+ - lib/s3ckup/version.rb
+ homepage: https://github.com/tscolari/s3ckup
+ licenses: []
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.2.2
+ signing_key:
+ specification_version: 4
+ summary: Folders to S3 backup tool.
+ test_files: []