s3archive 1.0

data/bin/s3archive ADDED
@@ -0,0 +1,4 @@
+ #!/usr/bin/env ruby
+
+ load File.expand_path('../../lib/tasks/s3archive.thor', __FILE__)
+ S3Archive::Cli.start
data/lib/s3archive/compress_and_upload.rb ADDED
@@ -0,0 +1,100 @@
+ # The main entry point db:backup_to_s3 is run by /etc/logrotate.d/rails3app
+ # daily. changed
+
+ require 'thor'
+ require 'socket' # For hostname
+ require 'tempfile'
+ require_relative 'logging'
+ require_relative 'config'
+ require_relative 's3_file_synchronizer'
+
+ module S3Archive
+   class CompressAndUpload
+     include Logging
+
+     def self.run(path)
+       new(path).run
+     end
+
+     attr_reader :path
+     def initialize(path)
+       @path = path
+     end
+
+     def run
+       unless File.exists?(path)
+         logger.error("COULD NOT FIND '#{path}'")
+         return
+       end
+
+       logger.info("* Processing #{path}")
+
+       compress! if compress?
+       upload!
+       delete_tempfile! if compress?
+     end
+
+     private
+     def compress?
+       @do_compress ||= begin
+         if path.end_with?('.gz')
+           logger.info("** #{path} already compressed, skipping compression")
+           false
+         else
+           true
+         end
+       end
+     end
+
+     def compress!
+       logger.info("** Compressing #{path} to #{tempfile.path}")
+       system "gzip -n -c < #{path} > #{tempfile.path}"
+     end
+
+     def upload!
+       bucket = S3Archive.config.bucket
+       logger.info("** Uploading #{path_to_upload} to s3://#{bucket}/#{key}")
+       S3FileSynchronizer.run(path_to_upload, bucket, key)
+     end
+
+     def path_to_upload
+       compress? ? tempfile.path : path
+     end
+
+     def delete_tempfile!
+       logger.info("** Deleting #{tempfile.path}")
+       tempfile.unlink
+     end
+
+     def tempfile
+       @tempfile ||= begin
+         tempfile = Tempfile.new([filename, '.gz'])
+         tempfile.close # An external process will write to it
+         tempfile
+       end
+     end
+
+     def key
+       year, month, day = Time.now.strftime("%Y-%m-%d").split('-')
+       [hostname, year, month, day, "#{filename}.gz"].join('/')
+     end
+
+     def filename
+       @filename ||= File.basename(path)
+     end
+
+     def hostname
+       Socket.gethostname
+     end
+   end
+ end
+
+
+ if $0 == __FILE__
+   path = File.join(File.dirname(__FILE__), 'config.rb')
+   S3Archive::CompressAndUpload.run(path)
+
+   # bucket = 'this.is.my.test.bucket'
+   # key = 'this is a folder/foobar'
+   # S3FileSynchronizer.run(in_path, bucket, key)
+ end
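
CompressAndUpload is the whole pipeline: gzip the file to a Tempfile (skipped when the path already ends in .gz), hand the result to S3FileSynchronizer, then unlink the Tempfile. A minimal sketch of driving it directly from Ruby, for example from a logrotate postrotate hook; the log path is hypothetical and the config file must exist (see config.rb below):

    require_relative 'lib/s3archive/compress_and_upload'  # assumes a checkout of the gem

    S3Archive.config_path = '/etc/s3archive.yml'          # bucket + AWS credentials
    S3Archive::CompressAndUpload.run('/var/log/rails3app/production.log.1')
    # Gzips to a Tempfile, then uploads it to
    # s3://<bucket>/<hostname>/<year>/<month>/<day>/production.log.1.gz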
data/lib/s3archive/config.rb ADDED
@@ -0,0 +1,26 @@
+ require 'singleton'
+ require 'yaml'
+
+ module S3Archive
+   def self.config_path=(config_path)
+     @config_path = config_path
+   end
+
+   def self.config_path
+     @config_path || "/etc/s3archive.yml"
+   end
+
+   def self.config
+     @config ||= Config.new(YAML.load(File.read(config_path)))
+   end
+
+   class Config
+     attr_accessor :bucket, :access_key_id, :secret_access_key
+
+     def initialize(params = {})
+       params.each do |key, val|
+         send("#{key}=", val)
+       end
+     end
+   end
+ end
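
Config is a plain value object: S3Archive.config lazily parses the YAML file at config_path (default /etc/s3archive.yml) and assigns each top-level key through the matching writer. A sketch of the expected file contents with placeholder values, parsed the same way the gem does:

    require 'yaml'
    require_relative 'lib/s3archive/config'  # assumes a checkout of the gem

    # Placeholder /etc/s3archive.yml contents; the keys mirror Config's accessors.
    yaml = <<-YML
    bucket: my-backup-bucket
    access_key_id: AKIAEXAMPLE
    secret_access_key: not-a-real-secret
    YML

    config = S3Archive::Config.new(YAML.load(yaml))
    config.bucket  # => "my-backup-bucket"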
data/lib/s3archive/logging.rb ADDED
@@ -0,0 +1,9 @@
+ require 'logger'
+
+ module S3Archive
+   module Logging
+     def logger
+       @logger ||= Logger.new(STDOUT)
+     end
+   end
+ end
data/lib/s3archive/s3_file_synchronizer.rb ADDED
@@ -0,0 +1,113 @@
+ require 'right_aws'
+ require 'digest/md5'
+ require_relative 'logging'
+
+ module S3Archive
+   class S3FileSynchronizer
+     include Logging
+
+     attr_reader :local_file, :s3_file
+     def initialize(local_file, s3_file)
+       @local_file = local_file
+       @s3_file = s3_file
+     end
+
+     def run
+       if s3_file.exists?
+         if s3_file.md5_hex == local_file.md5_hex
+           logger.info("'#{s3_file}' already exists and has correct checksum")
+           nil
+         else
+           new_s3_file = S3File.new(s3_file.bucket, "#{s3_file.key}.#{local_file.md5_hex}")
+           logger.error("'#{s3_file}' already exists and has wrong checksum. Uploading to '#{new_s3_file}'.")
+           new_s3_file.put(local_file)
+         end
+       else
+         s3_file.put(local_file)
+       end
+     end
+
+     def self.run(local_path, bucket, key)
+       local_file = LocalFile.new(local_path)
+       s3_file = S3File.new(bucket, key)
+
+       new(local_file, s3_file).run
+     end
+   end
+
+   # Just a wrapper around a path with some md5 functions
+   LocalFile = Struct.new(:path) do
+     include Logging
+
+     def open(*args, &block)
+       File.open(path, *args, &block)
+     end
+
+     def md5_hex
+       md5.hexdigest
+     end
+
+     def md5_base64
+       md5.base64digest
+     end
+
+     def md5
+       @md5 ||= Digest::MD5.file(path)
+     end
+
+     def to_s
+       path
+     end
+   end
+
+   # A wrapper around a s3 path (bucket, key) with some md5 and a put function
+   S3File = Struct.new(:bucket, :key) do
+     include Logging
+
+     def md5_hex
+       exists? && headers.fetch("etag").tr('"', '')
+     end
+
+     def exists?
+       !headers.nil?
+     end
+
+     def put(local_file)
+       local_file.open do |file|
+         logger.info("Putting '#{local_file}' to '#{self}'")
+         s3interface.put(bucket, key, file, 'Content-MD5' => local_file.md5_base64)
+       end
+     end
+
+     def to_s
+       "s3://#{bucket}/#{key}"
+     end
+
+     private
+     def headers
+       @headers ||= without_close_on_error do
+         begin
+           s3interface.head(bucket, key)
+         rescue RightAws::AwsError => e
+           raise unless e.http_code.to_s == '404'
+         end
+       end
+     end
+
+     def without_close_on_error(&block)
+       old_val = RightAws::AWSErrorHandler.close_on_error
+       RightAws::AWSErrorHandler.close_on_error = false
+       block.call
+     ensure
+       RightAws::AWSErrorHandler.close_on_error = old_val
+     end
+
+     def s3interface
+       @s3interface ||= RightAws::S3Interface.new(
+         S3Archive.config.access_key_id,
+         S3Archive.config.secret_access_key,
+         :logger => logger
+       )
+     end
+   end
+ end
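
S3FileSynchronizer is what makes re-running the archive job safe: it HEADs the target key first, treats a matching ETag/MD5 as already uploaded, and on a checksum mismatch writes to a new key suffixed with the local MD5 rather than overwriting (comparing the ETag to an MD5 is valid here because the code only does single-part PUTs). A sketch of calling it directly for an already-compressed file; the bucket, key and paths are hypothetical:

    require_relative 'lib/s3archive/config'                # assumes a checkout of the gem
    require_relative 'lib/s3archive/s3_file_synchronizer'

    S3Archive.config_path = '/etc/s3archive.yml'           # AWS credentials for RightAws
    S3Archive::S3FileSynchronizer.run(
      '/var/backups/db.dump.gz',        # local file
      'my-backup-bucket',               # S3 bucket
      'db01/2012/05/30/db.dump.gz'      # S3 key
    )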
data/lib/tasks/s3archive.thor ADDED
@@ -0,0 +1,22 @@
+ require 'thor'
+ $:.unshift File.join(File.dirname(__FILE__), '..')
+ require 's3archive/compress_and_upload'
+
+ module S3Archive
+   class Cli < Thor
+     namespace :s3archive
+
+     class_option "-c",
+       :desc => "Path to config file",
+       :banner => "CONFIG_FILE",
+       :type => :string,
+       :aliases => "--config",
+       :default => "/etc/s3archive.yml"
+
+     desc "upload PATH", "Compresses PATH and uploads to s3://<bucket>/<year>/<month>/<day>/<filename>.gz"
+     def upload (orig_path, options = {})
+       S3Archive.config_path = self.options["c"] if self.options["c"]
+       CompressAndUpload.run(orig_path)
+     end
+   end
+ end
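
This Thor task is the code path the bin/s3archive executable loads, i.e. "s3archive upload PATH" with an optional -c CONFIG_FILE. Note that CompressAndUpload#key also prefixes the hostname, so objects actually land at s3://<bucket>/<hostname>/<year>/<month>/<day>/<filename>.gz rather than the layout in the desc string. A sketch of driving the task programmatically, equivalent to running the executable; the paths are hypothetical:

    load File.expand_path('lib/tasks/s3archive.thor')  # assumes a checkout of the gem

    # Same as: s3archive upload /var/log/rails3app/production.log.1 -c /etc/s3archive.yml
    S3Archive::Cli.start(%w[upload /var/log/rails3app/production.log.1 -c /etc/s3archive.yml])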
metadata ADDED
@@ -0,0 +1,100 @@
+ --- !ruby/object:Gem::Specification
+ name: s3archive
+ version: !ruby/object:Gem::Version
+   version: '1.0'
+   prerelease:
+ platform: ruby
+ authors:
+ - Petter Remen
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2012-05-30 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: thor
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 0.14.6
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 0.14.6
+ - !ruby/object:Gem::Dependency
+   name: right_aws
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 3.0.0
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 3.0.0
+ - !ruby/object:Gem::Dependency
+   name: rspec
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 2.9.0
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: 2.9.0
+ description: ''
+ email:
+ - petter@spnab.com
+ executables:
+ - s3archive
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - bin/s3archive
+ - lib/s3archive/compress_and_upload.rb
+ - lib/s3archive/config.rb
+ - lib/s3archive/logging.rb
+ - lib/s3archive/s3_file_synchronizer.rb
+ - lib/tasks/s3archive.thor
+ homepage: http://github.com/spab/s3archive
+ licenses: []
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 1.8.24
+ signing_key:
+ specification_version: 3
+ summary: Simple script to safely archive a file to S3
+ test_files: []