s3streambackup 0.1.0 → 0.2.0

data/README.md CHANGED
@@ -1,6 +1,7 @@
  # S3 Stream Backup
  
  Stores data from STDIN in S3 object using multipart upload and removes oldest backups to keep maximum desired backup object count.
+ Restore tool included.
  
  ## Installing
  
@@ -15,12 +16,20 @@ gem install s3streambackup
  ## Usage
  
  ```bash
- s3streambackup mybucket some-backup < some-backup.file
+ # store some-backup.file in the mybucket bucket under the name my-backup
+ s3streambackup mybucket my-backup < some-backup.file
+
+ # list available backups of my-backup
+ s3streamrestore mybucket my-backup
+
+ # restore the my-backup backup from 2013-08-06 09:03:17 UTC
+ s3streamrestore mybucket my-backup 130806_090317 > some-backup.file
  ```
  
- Note that you should have your S3 key and secret set in environment `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` or you can specify them in command line with `--key` and `--secret`.
+ Note that you should have your S3 key and secret set in the environment variables `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`, or you can specify them on the command line with the `--key` and `--secret` options.
  
  You can store your backup objects under a prefix by using the `--prefix` option, e.g. `--prefix backups/server1/`.
+ Additionally, you can postfix your backup objects by using the `--postfix` option, e.g. `--postfix .sql.gz`.
  
  By default two backup copies will be kept. You can change this number with the `--keep` option.
  
@@ -29,7 +38,11 @@ For other usage information use `--help`.
  ### PostgreSQL backup example
  
  ```bash
- /bin/su - postgres -c '/usr/bin/pg_dumpall' | xz -2 | s3streambackup --keep 7 --prefix backups/zabbix/ mybucket postgress-all
+ # backup to S3
+ su - postgres -c 'pg_dumpall' | xz -2 | s3streambackup --keep 7 --prefix backups/zabbix/ mybucket postgress-all
+
+ # a restore could look like this
+ s3streamrestore --prefix backups/zabbix/ mybucket postgress-all 130806_090317 | xz -d | su - postgres -c 'psql'
  ```
  
  ## Contributing to S3 Stream Backup
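The backup object key is assembled as `<prefix><name>.backup.<yymmdd_HHMMSS><postfix>`, matching the path construction shown in `bin/s3streambackup` below. A minimal sketch of that layout, with a hypothetical `backup_key` helper (not part of the gem):

```ruby
# Hypothetical helper mirroring the key layout built in bin/s3streambackup:
# "#{prefix}#{name}.backup.#{timestamp}#{postfix}"
def backup_key(prefix, name, postfix = '', now = Time.now.utc)
  "#{prefix}#{name}.backup.#{now.strftime('%y%m%d_%H%M%S')}#{postfix}"
end

backup_key('backups/server1/', 'my-backup', '.sql.gz', Time.utc(2013, 8, 6, 9, 3, 17))
# => "backups/server1/my-backup.backup.130806_090317.sql.gz"
```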
data/VERSION CHANGED
@@ -1 +1 @@
- 0.1.0
+ 0.2.0
data/bin/s3streambackup CHANGED
@@ -1,117 +1,55 @@
  #!/usr/bin/env ruby
  
- require 'cli'
- require 'aws-sdk'
- require 'logger'
+ $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
+ require 's3streambackup'
  
- ## HACK: Auto select region based on location_constraint
- module AWS
-   class S3
-     class BucketCollection
-       def [](name)
-         # if name is DNS compatible we still cannot use it for writes if it does contain dots
-         return S3::Bucket.new(name.to_s, :owner => nil, :config => config) if client.dns_compatible_bucket_name?(name) and not name.include? '.'
- 
-         # save region mapping for bucket for futher requests
-         @@location_cache = {} unless defined? @@location_cache
-         # if we have it cased use it; else try to fetch it and if it is nil bucket is in standard region
-         region = @@location_cache[name] || @@location_cache[name] = S3::Bucket.new(name.to_s, :owner => nil, :config => config).location_constraint || @@location_cache[name] = :standard
- 
-         # no need to specify region if bucket is in standard region
-         return S3::Bucket.new(name.to_s, :owner => nil, :config => config) if region == :standard
- 
-         # use same config but with region specified for buckets that are not DNS compatible or have dots and are not in standard region
-         S3::Bucket.new(name.to_s, :owner => nil, :config => config.with(region: region))
-       end
-     end
-   end
- end
- 
- settings = CLI.new do
+ S3StreamBackup.new do
+   cli do
+     description 'Store backup from STDIN to S3'
      stdin :data
-     option :key,
-       short: :k,
-       description: 'S3 key',
-       default: ENV['AWS_ACCESS_KEY_ID']
-     option :secret,
-       short: :s,
-       description: 'S3 key secret',
-       default_label: '<secret>',
-       default: ENV['AWS_SECRET_ACCESS_KEY']
-     option :prefix,
-       short: :p,
-       description: 'prefix under which the objects will be kept',
-       default: ''
      option :keep,
        short: :K,
        description: 'how many backup files to keep',
        cast: Integer,
        default: 2
-     option :log_file,
-       short: :l,
-       description: 'location of log file'
-     switch :plain,
-       description: 'use plain connections instead of SSL to S3'
-     switch :debug,
-       description: 'log debug messages'
-     argument :bucket,
-       description: 'name of bucket to upload data to'
-     argument :name,
-       description: 'name under which the object will be stored'
- end.parse! do |settings|
-   fail 'AWS_ACCESS_KEY_ID environment not set and --key not used' unless settings.key
-   fail 'AWS_SECRET_ACCESS_KEY environment not set and --secret not used' unless settings.secret
- end
- 
- log = Logger.new(settings.log_file ? settings.log_file : STDOUT)
- log.formatter = proc do |severity, datetime, progname, msg|
-   "[#{datetime.utc.strftime "%Y-%m-%d %H:%M:%S.%6N %Z"}] [#{$$}] #{severity}: #{msg.strip}\n"
- end
- 
- log.level = Logger::INFO
- log.level = Logger::DEBUG if settings.debug
- 
- begin
-   s3 = AWS::S3.new(
-     access_key_id: settings.key,
-     secret_access_key: settings.secret,
-     logger: log,
-     log_level: :debug,
-     use_ssl: ! settings.plain
-   )
- 
-   upload_date = Time.now.utc.strftime "%y%m%d_%H%M%S"
-   prefix = "#{settings.prefix}#{settings.name}.backup."
-   path = "#{prefix}#{upload_date}"
- 
-   bucket = s3.buckets[settings.bucket]
-   backup = bucket.objects[path]
-   log.info "writting to: #{path}"
- 
-   # make sure we use multipart upload
-   total_bytes = 0
-   backup.write(estimated_content_length: 10 * 1024 ** 3) do |buffer, bytes|
-     log.info "#{total_bytes} bytes written..."
-     data = settings.stdin.read(bytes)
-     total_bytes += data.bytesize
-     buffer.write data
+     option :postfix,
+       short: :P,
+       description: 'postfix which is appended to backup objects',
+       default: ''
    end
-   log.info "total upload size: #{total_bytes}"
  
-   backups = bucket.objects.with_prefix(prefix).to_a
+   main do |settings, log, s3|
+     upload_date = Time.now.utc.strftime "%y%m%d_%H%M%S"
+     prefix = "#{settings.prefix}#{settings.name}.backup."
+     path = "#{prefix}#{upload_date}#{settings.postfix}"
+ 
+     bucket = s3.buckets[settings.bucket]
+     backup = bucket.objects[path]
+     log.info "writing to: #{path}"
+ 
+     # make sure we use multipart upload
+     total_bytes = ProgressLogger.new
+     backup.write(
+       content_type: 'application/octet-stream',
+       estimated_content_length: 10 * 1024 ** 3
+     ) do |buffer, bytes|
+       total_bytes.log(log, ' written...')
+       data = settings.stdin.read(bytes)
+       total_bytes << data.bytesize
+       buffer.write data
+     end
+     log.info "total upload size: #{total_bytes.in_bytes_auto}"
  
-   log.info "keeping maximum #{settings.keep} latest buckups of #{backups.length} storred"
+     backups = bucket.objects.with_prefix(prefix).to_a
  
-   if backups.length > settings.keep
-     backups.take(backups.length - settings.keep).each do |backup|
-       log.info "removing oldest backup: #{backup.key}"
-       backup.delete
+     log.info "keeping maximum #{settings.keep} latest backups of #{backups.length} stored"
+ 
+     if backups.length > settings.keep
+       backups.take(backups.length - settings.keep).each do |backup|
+         log.info "removing oldest backup: #{backup.key}"
+         backup.delete
+       end
      end
    end
- rescue => error
-   msg = "#{error.class.name}: #{error.message}\n#{error.backtrace.join("\n")}"
-   log.error msg
-   STDERR.write msg
-   exit 10
  end
  
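Backup trimming relies on S3 listing object keys in lexicographic order: the `yymmdd_HHMMSS` timestamp makes that order chronological, so `backups.take(backups.length - settings.keep)` always picks the oldest objects. A sketch of the same idea on plain strings (hypothetical key names):

```ruby
# S3 returns keys sorted lexicographically; the yymmdd_HHMMSS timestamp
# makes that chronological, so the oldest backups come first in the list.
backups = %w[
  my-backup.backup.130804_120000
  my-backup.backup.130805_120000
  my-backup.backup.130806_090317
]
keep = 2
backups.take(backups.length - keep)
# => ["my-backup.backup.130804_120000"]
```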
data/bin/s3streamrestore ADDED
@@ -0,0 +1,66 @@
+ #!/usr/bin/env ruby
+ 
+ $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
+ require 's3streambackup'
+ 
+ S3StreamBackup.new do
+   cli do
+     description 'Restore backup from S3 to STDOUT'
+     option :keep,
+       short: :K,
+       description: 'how many backup files to keep',
+       cast: Integer,
+       default: 2
+     option :postfix,
+       short: :P,
+       description: 'ignored',
+       default: ''
+     argument :date,
+       description: 'date of the backup to restore, e.g. 130806_090317; if not specified list available backups',
+       required: false
+   end
+ 
+   main do |settings, log, s3|
+     prefix = "#{settings.prefix}#{settings.name}.backup."
+ 
+     bucket = s3.buckets[settings.bucket]
+     backups = bucket.objects.with_prefix(prefix).to_a
+     if not settings.date
+       dates = backups.map do |backup|
+         backup.key.to_s.match(/.backup.([0-9]{6}_[0-9]{6})/).captures.first
+       end
+       dates.each do |date|
+         puts "#{date} (#{Time.parse(date + ' +0000')})"
+       end
+       exit 0
+     end
+ 
+     backup = backups.select do |backup|
+       backup.key =~ /.backup.#{settings.date}/
+     end.first
+ 
+     fail "backup from date #{settings.date} not found" unless backup
+ 
+     length = backup.content_length
+     log.info "sourcing from: #{backup.key} length: #{length}"
+ 
+     total_bytes = ProgressLogger.new
+     backup.read do |data|
+       total_bytes.log log, ' read...'
+       total_bytes << data.bytesize
+       begin
+         STDOUT.write data
+       rescue Errno::EPIPE
+         log.warn "STDOUT closed prematurely"
+         exit 1
+       end
+     end
+     if total_bytes != length
+       log.warn "got different amount of data (#{total_bytes} bytes) than expected (#{length} bytes)"
+       exit 1
+     else
+       log.info "total restore size: #{total_bytes.in_bytes_auto}"
+     end
+   end
+ end
+ 
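When no date argument is given, `s3streamrestore` lists available backups by pulling the timestamp out of each object key with a regex. A quick sketch of what that pattern captures (the key below is a hypothetical example):

```ruby
# The same pattern used above, applied to a sample backup key.
key = 'backups/zabbix/postgress-all.backup.130806_090317.sql.gz'
key.match(/.backup.([0-9]{6}_[0-9]{6})/).captures.first
# => "130806_090317"
```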
data/lib/s3streambackup.rb ADDED
@@ -0,0 +1,137 @@
+ require 'cli'
+ require 'aws-sdk'
+ require 'logger'
+ require 's3streambackup/units'
+ 
+ ## HACK: Auto select region based on location_constraint
+ module AWS
+   class S3
+     class BucketCollection
+       def [](name)
+         # if name is DNS compatible we still cannot use it for writes if it contains dots
+         return S3::Bucket.new(name.to_s, :owner => nil, :config => config) if client.dns_compatible_bucket_name?(name) and not name.include? '.'
+ 
+         # save region mapping for bucket for further requests
+         @@location_cache = {} unless defined? @@location_cache
+         # if we have it cached use it; else try to fetch it and if it is nil the bucket is in the standard region
+         region = @@location_cache[name] || @@location_cache[name] = S3::Bucket.new(name.to_s, :owner => nil, :config => config).location_constraint || @@location_cache[name] = :standard
+ 
+         # no need to specify region if bucket is in standard region
+         return S3::Bucket.new(name.to_s, :owner => nil, :config => config) if region == :standard
+ 
+         # use same config but with region specified for buckets that are not DNS compatible or have dots and are not in standard region
+         S3::Bucket.new(name.to_s, :owner => nil, :config => config.with(region: region))
+       end
+     end
+   end
+ end
+ 
+ class S3StreamBackup
+   def initialize(&block)
+     instance_eval &block
+     cli_setup = @cli_setup
+     cli_verify_setup = @cli_verify_setup
+     settings = CLI.new do
+       option :key,
+         short: :k,
+         description: 'S3 key',
+         default: ENV['AWS_ACCESS_KEY_ID']
+       option :secret,
+         short: :s,
+         description: 'S3 key secret',
+         default_label: '<secret>',
+         default: ENV['AWS_SECRET_ACCESS_KEY']
+       option :prefix,
+         short: :p,
+         description: 'prefix under which the backup objects are kept',
+         default: ''
+       option :log_file,
+         short: :l,
+         description: 'location of log file; if not specified log to STDERR'
+       switch :plain,
+         description: 'use plain connections instead of SSL to S3'
+       switch :verbose,
+         short: :v,
+         description: 'log debug messages'
+       switch :debug,
+         short: :d,
+         description: 'log AWS SDK debug messages'
+       argument :bucket,
+         description: 'name of bucket to upload data to'
+       argument :name,
+         description: 'name under which the object will be stored'
+       instance_eval &cli_setup if cli_setup
+     end.parse! do |settings|
+       fail 'AWS_ACCESS_KEY_ID environment not set and --key not used' unless settings.key
+       fail 'AWS_SECRET_ACCESS_KEY environment not set and --secret not used' unless settings.secret
+       instance_eval &cli_verify_setup if cli_verify_setup
+     end
+ 
+     log = Logger.new(settings.log_file ? settings.log_file : STDERR)
+     log.formatter = proc do |severity, datetime, progname, msg|
+       "[#{datetime.utc.strftime "%Y-%m-%d %H:%M:%S.%6N %Z"}] [#{$$}] #{severity}: #{msg.strip}\n"
+     end
+ 
+     log.level = Logger::INFO
+     log.level = Logger::DEBUG if settings.verbose or settings.debug
+ 
+     begin
+       s3 = AWS::S3.new(
+         access_key_id: settings.key,
+         secret_access_key: settings.secret,
+         logger: settings.debug ? log : nil,
+         log_level: :debug,
+         use_ssl: ! settings.plain
+       )
+       @main.call(settings, log, s3)
+     rescue => error
+       msg = "#{error.class.name}: #{error.message}\n#{error.backtrace.join("\n")}"
+       log.error msg
+       STDERR.write msg
+       exit 10
+     end
+   end
+ 
+   def cli(&block)
+     @cli_setup = block
+   end
+ 
+   def cli_verify(&block)
+     @cli_verify_setup = block
+   end
+ 
+   def main(&block)
+     @main = block
+   end
+ end
+ 
+ class ProgressLogger
+   def initialize
+     @bytes = 0
+   end
+ 
+   def log(logger, postfix)
+     if logger.debug?
+       was, @output_bytes = @output_bytes || '', @bytes.in_bytes_auto
+       logger.debug "#{@output_bytes}#{postfix}"
+     end
+   end
+ 
+   def <<(bytes)
+     @bytes += bytes
+   end
+ 
+   def to_s
+     @bytes.to_s
+   end
+ 
+   include Comparable
+   def <=>(value)
+     @bytes <=> value
+   end
+ 
+   def in_bytes_auto
+     @bytes.in_bytes_auto
+   end
+ end
+ 
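Both executables are thin wrappers around this `S3StreamBackup` DSL: `cli` extends the shared option set, and `main` receives the parsed settings, the logger, and a ready S3 handle. A minimal sketch of a hypothetical third tool built the same way (not shipped with the gem):

```ruby
#!/usr/bin/env ruby
require 's3streambackup'

S3StreamBackup.new do
  cli do
    description 'List backup object keys stored under the common prefix'
  end

  main do |settings, log, s3|
    # same key prefix convention as the backup and restore tools
    prefix = "#{settings.prefix}#{settings.name}.backup."
    bucket = s3.buckets[settings.bucket]
    bucket.objects.with_prefix(prefix).each do |backup|
      puts backup.key
    end
  end
end
```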
data/lib/s3streambackup/units.rb ADDED
@@ -0,0 +1,19 @@
+ class Numeric
+   UNITS = %w{bytes KiB MiB GiB TiB PiB EiB}
+   def in_bytes_auto
+     size = self.to_f
+     units = UNITS.dup
+ 
+     while size > 999
+       size /= 1024
+       units.shift
+     end
+ 
+     if units.length == UNITS.length
+       "#{'%d' % size} #{units.first}"
+     else
+       "#{'%.1f' % size} #{units.first}"
+     end
+   end
+ end
+ 
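Given this definition, `in_bytes_auto` divides by 1024 until the value is at most 999 and reports the matching unit; only unscaled byte counts are printed without a fraction. For instance:

```ruby
require 's3streambackup/units'

500.in_bytes_auto              # => "500 bytes"
1024.in_bytes_auto             # => "1.0 KiB"
(10 * 1024 ** 3).in_bytes_auto # => "10.0 GiB" (the estimated multipart upload size)
```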
data/s3streambackup.gemspec CHANGED
@@ -5,14 +5,14 @@
  
  Gem::Specification.new do |s|
    s.name = "s3streambackup"
-   s.version = "0.1.0"
+   s.version = "0.2.0"
  
    s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
    s.authors = ["Jakub Pastuszek"]
-   s.date = "2013-08-05"
+   s.date = "2013-08-06"
    s.description = "Stores data from STDIN in S3 object using multipart upload and removes oldest backups to keep maximum desired backup object count."
    s.email = "jpastuszek@gmail.com"
-   s.executables = ["s3streambackup"]
+   s.executables = ["s3streambackup", "s3streamrestore"]
    s.extra_rdoc_files = [
      "LICENSE.txt",
      "README.md"
@@ -27,9 +27,12 @@ Gem::Specification.new do |s|
      "Rakefile",
      "VERSION",
      "bin/s3streambackup",
+     "bin/s3streamrestore",
      "features/s3streambackup.feature",
      "features/step_definitions/s3streambackup_steps.rb",
      "features/support/env.rb",
+     "lib/s3streambackup.rb",
+     "lib/s3streambackup/units.rb",
      "s3streambackup.gemspec",
      "spec/s3streambackup_spec.rb",
      "spec/spec_helper.rb"
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: s3streambackup
  version: !ruby/object:Gem::Version
-   version: 0.1.0
+   version: 0.2.0
  prerelease:
  platform: ruby
  authors:
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2013-08-05 00:00:00.000000000 Z
+ date: 2013-08-06 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: cli
@@ -144,6 +144,7 @@ description: Stores data from STDIN in S3 object using multipart upload and remo
  email: jpastuszek@gmail.com
  executables:
  - s3streambackup
+ - s3streamrestore
  extensions: []
  extra_rdoc_files:
  - LICENSE.txt
@@ -158,9 +159,12 @@ files:
  - Rakefile
  - VERSION
  - bin/s3streambackup
+ - bin/s3streamrestore
  - features/s3streambackup.feature
  - features/step_definitions/s3streambackup_steps.rb
  - features/support/env.rb
+ - lib/s3streambackup.rb
+ - lib/s3streambackup/units.rb
  - s3streambackup.gemspec
  - spec/s3streambackup_spec.rb
  - spec/spec_helper.rb
@@ -179,7 +183,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
      version: '0'
    segments:
    - 0
-   hash: -1306132470382039118
+   hash: 4107987020335858105
  required_rubygems_version: !ruby/object:Gem::Requirement
    none: false
  requirements: