cryo 0.0.1 → 0.0.2
- data/.gitignore +5 -0
- data/Makefile +5 -0
- data/bin/bootstrap_cryo.sh +3 -0
- data/bin/cryo +158 -0
- data/bin/test +100 -0
- data/bin/test2 +210 -0
- data/cryo.gemspec +9 -2
- data/examples/rds.sh +28 -0
- data/lib/cryo.rb +88 -3
- data/lib/cryo/database.rb +16 -0
- data/lib/cryo/database/mysql.rb +44 -0
- data/lib/cryo/database/postgres.rb +31 -0
- data/lib/cryo/database/redis.rb +63 -0
- data/lib/cryo/message.rb +23 -0
- data/lib/cryo/message/sns.rb +19 -0
- data/lib/cryo/store.rb +139 -0
- data/lib/cryo/store/s3.rb +128 -0
- data/lib/cryo/utils.rb +117 -0
- data/lib/cryo/version.rb +2 -2
- metadata +102 -5
data/.gitignore
CHANGED
data/Makefile
ADDED
data/bin/cryo
ADDED
@@ -0,0 +1,158 @@
+#!/usr/bin/env ruby
+
+require_relative "../lib/cryo"
+require 'trollop'
+
+required_parameters = ["archive_bucket",
+                       "archive_frequency",
+                       "aws_access_key",
+                       "aws_secret_key",
+                       "host",
+                       "password",
+                       "snapshot_bucket",
+                       "snapshot_frequency",
+                       "snapshot_period",
+                       "snapshot_prefix",
+                       "sns_topic",
+                       "tmp_path",
+                       "user",
+                      ]
+
+mode = ARGV.shift
+
+options = Trollop::options do
+  version "cryo #{Cryo::VERSION} (c) 2013 Airbnb"
+  banner <<-END_OF_BANNER
+
+#{version}
+
+Welcome to Cryo, a simple backup utility
+
+All options can be passed in with the given command line options, or via the environment
+using the same name, capitalized and prefixed with CRYO_. Like: CRYO_HOST instead of --host
+
+More docs and examples can be found at https://github.com/airbnb/cryo
+
+Usage:
+       cryo [redis,mysql,postgres,list,get] [options]
+
+where [options] are:
+
+  END_OF_BANNER
+
+  opt(:tmp_path,
+      "where should temp files be created",
+      :type => String,
+      :default => ENV['CRYO_TMP_PATH'])
+
+  opt(:snapshot_frequency,
+      "how often to take backups (in mins)",
+      :type => Integer,
+      :default => ENV['CRYO_SNAPSHOT_FREQUENCY'].to_i)
+
+  opt(:archive_frequency,
+      "maximum time between archives (in mins)",
+      :type => Integer,
+      :default => ENV['CRYO_ARCHIVE_FREQUENCY'].to_i)
+
+  opt(:snapshot_period,
+      "time before snapshots get deleted or archived (in mins)",
+      :type => Integer,
+      :default => ENV['CRYO_SNAPSHOT_PERIOD'].to_i)
+
+  opt(:snapshot_bucket,
+      "s3 bucket to use for snapshots",
+      :type => String,
+      :default => ENV['CRYO_SNAPSHOT_BUCKET'])
+
+  opt(:snapshot_prefix,
+      "s3 object prefix to use for snapshots",
+      :type => String,
+      :default => ENV['CRYO_SNAPSHOT_PREFIX'])
+
+  opt(:archive_bucket,
+      "s3 bucket to use for archives",
+      :type => String,
+      :default => ENV['CRYO_ARCHIVE_BUCKET'])
+
+  opt(:archive_prefix,
+      "s3 object prefix to use for archives",
+      :type => String,
+      :default => ENV['CRYO_ARCHIVE_PREFIX'])
+
+  opt(:sns_topic,
+      "sns topic",
+      :type => String,
+      :default => ENV['CRYO_SNS_TOPIC'])
+
+  opt(:aws_access_key,
+      "aws_access_key. Can be set using the AWS_ACCESS_KEY environment variable",
+      :type => String,
+      :default => ENV['CRYO_AWS_ACCESS_KEY'])
+
+  opt(:aws_secret_key,
+      "aws_secret_key. Can be set using the AWS_SECRET_KEY environment variable",
+      :type => String,
+      :default => ENV['CRYO_AWS_SECRET_KEY'])
+
+  opt(:host,
+      "remote host. Can be set using the CRYO_HOST environment variable",
+      :type => String,
+      :default => ENV['CRYO_HOST'])
+
+  opt(:user,
+      "remote user",
+      :type => String,
+      :default => ENV['CRYO_USER'])
+
+  opt(:password,
+      "remote password",
+      :type => String,
+      :default => ENV['CRYO_PASSWORD'])
+
+
+  case mode
+  when 'mysql'
+  when 'redis'
+    opt(:path,
+        "path to redis database file",
+        :type => String,
+        :default => ENV['CRYO_PATH'])
+    required_parameters << 'path'
+  when 'postgres'
+  when 'list'
+  when 'get'
+  else
+    unless mode == '--help'
+      STDERR.puts "ERROR! bad input. first option needs to be one of [redis, postgres, mysql, list, get]"
+      STDERR.puts "Please use --help for more info"
+      exit 1
+    end
+  end
+end
+
+options.merge!(type: mode)
+
+
+required_parameters.each do |arg|
+  Trollop::die arg.to_sym, "needs to be specified on the command line or set by the CRYO_#{arg.upcase} environment variable" \
+    if options[arg.to_sym].nil? or ! options[arg.to_sym]
+end unless mode == 'get'
+
+
+run = Cryo.new(options)
+
+case mode
+when 'list'
+  run.list_snapshots
+when 'get'
+  snapshot = ARGV.shift
+  raise "you did not specify a snapshot!" if snapshot.nil?
+  run.get_snapshot(snapshot)
+else
+  run.backup!
+  run.archive_and_purge
+end
+
+#
+# run.list_archives
data/bin/test
ADDED
@@ -0,0 +1,100 @@
+options = Trollop::options do
+
+  opt(:snapshot_frequency,
+      "how often to take backups (in mins)",
+      :type => Integer,
+      :default => ENV['CRYO_SNAPSHOT_FREQUENCY'].to_i || nil,
+      :required => true)
+
+  opt(:archive_frequency,
+      "maximum time between archives (in mins)",
+      :type => Integer,
+      :default => ENV['CRYO_ARCHIVE_FREQUENCY'].to_i || nil,
+      :required => true)
+
+  opt(:snapshot_period,
+      "time before snapshots get deleted or archived (in mins)",
+      :type => Integer,
+      :default => ENV['CRYO_SNAPSHOT_PERIOD'].to_i || nil,
+      :required => true)
+
+  opt(:snapshot_bucket,
+      "s3 bucket to use for snapshots",
+      :type => String,
+      :default => ENV['CRYO_SNAPSHOT_BUCKET'] || nil,
+      :required => true)
+
+  opt(:snapshot_prefix,
+      "s3 object prefix to use for snapshots",
+      :type => String,
+      :default => ENV['CRYO_SNAPSHOT_PREFIX'] || nil,
+      :required => true)
+
+  opt(:archive_bucket,
+      "s3 bucket to use for archives",
+      :type => String,
+      :default => ENV['CRYO_ARCHIVE_BUCKET'] || nil,
+      :required => true)
+
+  opt(:archive_prefix,
+      "s3 object prefix to use for archives",
+      :type => String,
+      :default => ENV['CRYO_ARCHIVE_PREFIX'] || nil,
+      :required => true)
+
+  opt(:sns_topic,
+      "sns topic",
+      :type => String,
+      :default => ENV['CRYO_SNS_TOPIC'] || nil,
+      :required => true)
+
+
+
+
+
+
+  opt(:host,
+      "remote host. Can be set using the CRYO_HOST environment variable",
+      :type => String,
+      :default => ENV['CRYO_HOST'] || nil,
+      :required => true)
+
+  opt(:user,
+      "remote user",
+      :type => String,
+      :default => ENV['CRYO_USER'] || nil,
+      :required => true)
+
+  opt(:password,
+      "remote password",
+      :type => String,
+      :default => ENV['CRYO_PASSWORD'] || nil,
+      :required => true)
+
+
+  case mode
+  when 'mysql'
+  when 'redis'
+    opt(:path,
+        "path to redis database file",
+        :type => String,
+        :default => ENV['CRYO_PATH'])
+    required_parameters << 'path'
+  when 'postgres'
+  else
+    unless mode == '--help'
+      STDERR.puts "ERROR! bad input. first option needs to be one of [redis, postgres, mysql]"
+      STDERR.puts "Please use --help for more info"
+      exit 1
+    end
+  end
+end
+
+options.merge!(type: mode)
+
+
+
+puts "options are:"
+require 'pp'
+pp options
+
data/bin/test2
ADDED
@@ -0,0 +1,210 @@
+#!/usr/bin/env ruby
+
+require 'trollop'
+require_relative "../lib/cryo"
+
+
+@required_inputs = [
+  'aws_access_key',
+  'aws_secret_key',
+]
+
+
+@options={}
+
+@noun=nil
+@verb=nil
+
+@log=true
+
+def log(msg='')
+  STDERR.puts msg if @log
+end
+
+
+def print_help_and_exit
+  banner = <<-END_OF_BANNER
+Welcome to Cryo, a simple backup utility. #{Cryo::VERSION} (c) 2013 Airbnb
+
+All options can be passed in with the given command line options, or via the environment
+using the same name, capitalized and prefixed with CRYO_. Like: CRYO_HOST instead of --host
+
+More docs and examples can be found at https://github.com/airbnb/cryo
+
+Usage:
+       cryo [backup,list,get] [options]
+
+where [options] are:
+
+  END_OF_BANNER
+  puts banner
+  Kernel.exit 1
+end
+
+def print_message_and_exit(message='')
+  STDERR.puts message
+  Kernel.exit 1
+end
+
+def parse_inputs
+  log "starting to parse command line inputs"
+  while next_argument = ARGV.shift
+    log "looping inside parse_inputs. next arg is #{next_argument}"
+    print_help_and_exit unless next_argument.start_with? '--'
+    formatted_key = next_argument.gsub(/^--/,'').gsub(/-/,'_').to_sym
+    if !ARGV.empty? and !ARGV.first.start_with? '--'
+      log "this option has a param"
+      param = ARGV.shift
+    else
+      log "this arg does not have a param"
+      param = true
+    end
+    @options.merge!({formatted_key => param})
+  end
+end
+
+def verify_environment_variable(variable='')
+  log "checking for environment variable #{variable}"
+  formatted_variable_name = "CRYO_#{variable.upcase}"
+  value = ENV[formatted_variable_name]
+  unless value.nil?
+    log "looks like the variable exists"
+    @options.merge!({variable.to_sym => value})
+  else
+    log "looks like the variable does not exist"
+    print_message_and_exit "\nyou need to make sure that you set the #{formatted_variable_name} variable!!!"
+  end
+end
+
+def verify_inputs
+  @required_inputs.each do |input|
+    verify_environment_variable input
+  end
+end
+
+
+def backup
+  %w{host}.each {|i| @required_inputs << i}
+  case @noun
+  when "mysql"
+  when "redis"
+  when "postgres"
+  else
+    print_help_and_exit
+  end
+end
+
+
+def list
+  case @noun
+  when "snapshots"
+  when "archives"
+  else
+    print_help_and_exit
+  end
+end
+
+
+def get
+  case @noun
+  when "archives"
+  when "snapshots"
+  else
+    print_help_and_exit
+  end
+end
+
+
+log "starting to parse arguments"
+print_help_and_exit if ARGV.size < 2
+@verb = ARGV.shift
+@noun = ARGV.shift
+log "got a noun and a verb. #{@verb} and #{@noun}"
+
+case @verb
+when "backup"
+  backup
+when "list"
+  list
+when "get"
+  get
+else
+  print_help_and_exit
+end
+
+parse_inputs
+
+
+verify_inputs
+
+log "options are #{@options.inspect}"
+
+Kernel.exit 2
+
+
+SUB_COMMANDS = %w(backup list get)
+global_opts = Trollop::options do
+  STDERR.puts "entering global options"
+  stop_on_unknown
+  version "cryo #{Cryo::VERSION} (c) 2013 Airbnb"
+
+
+  opt(:aws_access_key,
+      "aws_access_key. Can be set using the AWS_ACCESS_KEY environment variable",
+      :type => String,
+      :default => ENV['CRYO_AWS_ACCESS_KEY'] || nil,
+      :required => true)
+  stop_on SUB_COMMANDS
+end
+
+
+
+
+
+cmd = ARGV.shift # get the subcommand
+cmd_opts = \
+  case cmd
+  when "backup" # parse delete options
+    Trollop::options do
+      stop_on_unknown
+      stop_on ["aws_access_key"]
+      STDERR.puts "entering backup options"
+      opt :force, "Force deletion"
+    end
+  when "list" # parse copy options
+    Trollop::options do
+      STDERR.puts "entering list options"
+      opt :double, "Copy twice for safety's sake"
+    end
+  when "get" # parse copy options
+    Trollop::options do
+      STDERR.puts "entering get options"
+      opt :double, "Copy twice for safety's sake"
+    end
+  when nil
+    STDERR.puts "entering nil options"
+    puts "please call with --help if you need help"
+    Kernel.exit 1
+  else
+    STDERR.puts "entering else options"
+    Trollop::die "unknown subcommand #{cmd.inspect}"
+  end
+
+always_required_options = Trollop::options do
+  STDERR.puts "entering always required options"
+  stop_on_unknown
+
+
+  opt(:aws_secret_key,
+      "aws_secret_key. Can be set using the AWS_SECRET_KEY environment variable",
+      :type => String,
+      :default => ENV['CRYO_AWS_SECRET_KEY'] || nil,
+      :required => true)
+
+end
+
+
+puts "Global options: #{global_opts.inspect}"
+puts "Subcommand: #{cmd.inspect}"
+puts "Subcommand options: #{cmd_opts.inspect}"
+puts "Remaining arguments: #{ARGV.inspect}"
data/cryo.gemspec
CHANGED
@@ -4,13 +4,20 @@ require File.expand_path('../lib/cryo/version', __FILE__)
 Gem::Specification.new do |gem|
   gem.authors       = ["Nathan Baxter"]
   gem.email         = ["nathan.baxter@airbnb.com"]
-  gem.summary       = %q{Tool for snapshotting data, backing it up, verifying it, and
+  gem.summary       = %q{Tool for snapshotting data, backing it up, verifying it, cycling it, and triggering notifications.}
   gem.homepage      = "https://github.com/airbnb/cryo"
 
   gem.files         = `git ls-files`.split($\)
-  gem.executables   =
+  gem.executables   = ["cryo"]
+
   gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
   gem.name          = "cryo"
   gem.require_paths = ["lib"]
   gem.version       = Cryo::VERSION
+
+  gem.add_runtime_dependency "colorize"
+  gem.add_runtime_dependency "trollop", "~> 2.0"
+  gem.add_runtime_dependency "aws-sdk", '~> 1.6'
+  gem.add_runtime_dependency "net-ntp", '~> 2.1.1'
+  gem.add_development_dependency "pry"
 end
data/examples/rds.sh
ADDED
@@ -0,0 +1,28 @@
+#!/bin/bash -ex
+
+cd $(dirname $0) && cd ..
+
+./bin/cryo redis \
+    --host localhost \
+    --user me \
+    --password verysafe \
+    --sns-topic arn:aws:sns:us-east-1:172631448019:martin-redis-test \
+    --key somekey \
+    --aws-access-key some_aws_access_key \
+    --aws-secret-key some_secret \
+    --bucket some_buck \
+    --path /mnt/redis/foo
+
+
+# or
+
+export CRYO_AWS_ACCESS_KEY=some_key
+export CRYO_AWS_SECRET_KEY=some_secret
+export CRYO_BUCKET=some_other_buk
+export CRYO_SNS_TOPIC=some_sns_topic
+export CRYO_HOST=some_server_somewhere
+export CRYO_USER=some_user
+export CRYO_PASSWORD=some_good_password
+export CRYO_PATH=/some/path/to/redis/db
+
+./bin/cryo mysql
data/lib/cryo.rb
CHANGED
@@ -1,5 +1,90 @@
-require
+require 'colorize'
+require 'aws-sdk'
+require 'logger'
+require 'net/ntp'
+
+
+## require all ruby files recursively
+Dir.glob(File.join(File.dirname(__FILE__),'**/*.rb')).sort.each do |file|
+  require_relative file
+end
+
+
+class Cryo
+
+  include Utils
+  # HOST = `hostname`.chomp!
+  attr_accessor :options, :s3, :md5, :sns, :logger, :key
+
+  def initialize(options={})
+    get_utc_timestamp  # save start time for backup
+
+    self.options = options
+    self.logger = Logger.new(STDERR)
+    logger.level = Logger::DEBUG
+
+    @database = Database.create(options) \
+      unless options[:type] == 'list' or options[:type] == 'get'
+    @store = Store.create(options.merge(type: 's3',time: @time))
+    @message = Message.create(options.merge(type: 'sns'))
+    @snapshot_prefix = options[:snapshot_prefix]
+    @archive_prefix = options[:archive_prefix]
+    @key = get_timstamped_key_name
+    @snapshot_frequency = options[:snapshot_frequency]
+    @archive_frequency = options[:archive_frequency]
+    @snapshot_period = options[:snapshot_period]
+    @snapshot_bucket = options[:snapshot_bucket]
+    @archive_bucket = options[:archive_bucket]
+    @tmp_path = options[:tmp_path]
+  end
+
+
+  def backup!()
+    if @database.respond_to? 'get_gzipped_backup'
+      logger.info "getting compressed backup"
+      compressed_backup = @database.get_gzipped_backup
+      logger.info "got backup in #{(get_utc_time - @time).round 2} seconds"
+    else
+      logger.info "taking backup..."
+      backup_file = @database.get_backup
+      logger.info "got backup in #{(get_utc_time - @time).round 2} seconds"
+
+      timer = get_utc_time
+      logger.info "compressing backup..."
+      compressed_backup = gzip_file backup_file
+      logger.info "compressed backup in #{(get_utc_time - timer).round 2} seconds"
+    end
+
+    timer = get_utc_time
+    logger.info "storing backup..."
+    @store.put(content: Pathname.new(compressed_backup), bucket: options[:snapshot_bucket], key: @key)
+    logger.info "upload took #{(get_utc_time - timer).round 2} seconds"
+
+    logger.info "completed entire backup in #{(get_utc_time - @time).round 2} seconds :)"
+  end
+
+  def archive_and_purge()
+    logger.info "archiving and purging..."
+    @store.archive_and_purge()
+    logger.info "done archiving and purging :)"
+  end
+
+  def list_snapshots
+    snapshot_list = @store.get_bucket_listing(bucket: @snapshot_bucket, prefix: @snapshot_prefix)
+    puts "here is what I see in the snapshot bucket:"
+    snapshot_list.each { |i| puts "  #{i.key}"}
+  end
+
+  def list_archives
+    archive_list = @store.get_bucket_listing(bucket: @archive_bucket, prefix: @archive_prefix)
+    puts "here is what I see in the archive bucket:"
+    archive_list.each { |i| puts "  #{i.key}"}
+  end
+
+  def get_snapshot(snapshot)
+    basename = File.basename snapshot
+    puts "getting #{snapshot} and saving it in #{File.join(Dir.pwd,basename)}"
+    @store.get(bucket: @snapshot_bucket, key: snapshot, file: basename)
+  end
 
-module Cryo
-  # Your code goes here...
 end
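For reference, the same flow that bin/cryo drives can be exercised from Ruby directly. This is a minimal sketch, not part of the gem source: the option keys mirror the flags defined in bin/cryo, while the host, buckets, prefixes, and credentials below are placeholders.

    require 'cryo'

    # Hypothetical values; a real run needs valid AWS credentials and reachable hosts.
    options = {
      type:               'redis',          # dispatched to Database.create
      host:               'redis.example.com',
      user:               'ubuntu',
      password:           'verysafe',
      path:               '/mnt/redis/dump.rdb',
      tmp_path:           '/mnt/cryo',
      snapshot_bucket:    'example-snapshots',
      snapshot_prefix:    'redis/',
      archive_bucket:     'example-archives',
      archive_prefix:     'redis-archive/',
      snapshot_frequency: 60,                # minutes
      archive_frequency:  1440,              # minutes
      snapshot_period:    1440,              # minutes
      sns_topic:          'arn:aws:sns:us-east-1:000000000000:example',
      aws_access_key:     ENV['CRYO_AWS_ACCESS_KEY'],
      aws_secret_key:     ENV['CRYO_AWS_SECRET_KEY'],
    }

    run = Cryo.new(options)
    run.backup!            # dump, gzip, and upload one snapshot
    run.archive_and_purge  # promote or delete old snapshots
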
data/lib/cryo/database/mysql.rb
ADDED
@@ -0,0 +1,44 @@
+# this has all of the logic to perform an entire dump of a remote rds host
+
+class Mysql < Database
+
+  include Utils
+  attr_accessor :user, :host, :password, :local_path, :tmp_path, :port
+
+  def initialize(opts={})
+    raise "you need to specify a password" unless opts[:password]
+    self.password = opts[:password]
+    raise "you need to specify a host" unless opts[:host]
+    self.host = opts[:host]
+    raise "you need to specify a tmp path" unless opts[:tmp_path]
+    self.tmp_path = opts[:tmp_path]
+    self.user = opts[:user] || 'ubuntu'
+    self.port = opts[:port] || '3306'
+    self.local_path = opts[:local_path] || get_tempfile
+    verify_system_dependency 'mysqldump'
+  end
+
+  ## run through all of the necessary steps to perform a backup
+  def get_backup()
+    get_dump
+    local_path
+  end
+
+  def get_gzipped_backup
+    get_and_gzip_dump
+    local_path
+  end
+
+  private
+
+  ## perform a mysqldump to get an entire mysql dump on the local system, while gzipping it at the same time
+  def get_and_gzip_dump
+    safe_run "mysqldump --host=#{host} --user=#{user} --password=#{password} --all-databases --single-transaction | gzip > #{local_path}"
+  end
+
+  ## perform a mysqldump to get an entire mysql dump on the local system
+  def get_dump()
+    safe_run "mysqldump --host=#{host} --user=#{user} --password=#{password} --all-databases --single-transaction > #{local_path}"
+  end
+
+end
data/lib/cryo/database/postgres.rb
ADDED
@@ -0,0 +1,31 @@
+# this has all of the logic to perform an entire dump of a remote postgres host
+
+class Postgres
+  include Utils
+  attr_accessor :user, :host, :password, :local_path, :tmp_path
+
+  def initialize(opts={})
+    raise "you need to specify a password" unless opts[:password]
+    self.password = opts[:password]
+    raise "you need to specify a host" unless opts[:host]
+    self.host = opts[:host]
+    raise "you need to specify a tmp path" unless opts[:tmp_path]
+    self.tmp_path = opts[:tmp_path]
+    self.user = opts[:user] || 'ubuntu'
+    self.local_path = opts[:local_path] || get_tempfile
+    verify_system_dependency 'pg_dumpall'
+  end
+
+  def get_backup()
+    take_dump
+  end
+
+  private
+
+  ## perform a pg_dumpall to get an entire pgdump on the local system
+  def take_dump()
+    safe_run "PGPASSWORD=#{password} pg_dumpall --host=#{host} --username=#{user} --file=#{local_path}"
+    local_path
+  end
+
+end
data/lib/cryo/database/redis.rb
ADDED
@@ -0,0 +1,63 @@
+class Redis
+  include Utils
+  attr_accessor :user, :host, :remote_path, :local_path, :opts, :tmp_path
+
+  def initialize(opts={})
+    raise "you need to specify a remote host" unless opts[:host]
+    self.host = opts[:host]
+    self.user = opts[:user] || 'ubuntu'
+    raise "you need to specify a tmp path" unless opts[:tmp_path]
+    self.tmp_path = opts[:tmp_path]
+    self.remote_path = opts[:path] || '/mnt/redis/dump.rdb'
+    self.local_path = opts[:local_path] || get_tempfile
+  end
+
+
+  ## get a copy of the db from remote host
+  def get_backup()
+    take_dump
+  end
+
+
+  ## get a zipped copy of the db from remote host
+  def get_gzipped_backup
+    take_dump_and_gzip
+  end
+
+  private
+
+  ## copy the redis db into a new file and scp it here
+  def take_dump()
+    # TODO(martin): verify that both the local and remote hosts have enough free disk space for this to complete
+    temp_file = remote_path + "-backup-#{rand 99999}"
+    # this is kinda hacky, but we need to make sure that we remove a backup if we take one
+    begin
+      ssh "cp #{remote_path} #{temp_file}"
+      safe_run "scp #{user}@#{host}:#{temp_file} #{local_path}"
+    ensure
+      ssh "rm -f #{temp_file}"
+    end
+    local_path
+  end
+
+
+  ## copy the redis db into a new file and stream it here while zipping
+  def take_dump_and_gzip()
+    # TODO(martin): verify that both the local and remote hosts have enough free disk space for this to complete
+    temp_file = remote_path + "-backup-#{rand 99999}"
+    # this is kinda hacky, but we need to make sure that we remove a backup if we take one
+    begin
+      ssh "cp #{remote_path} #{temp_file}"
+      safe_run "(ssh #{user}@#{host} cat #{temp_file}) | gzip > #{local_path}"
+    ensure
+      ssh "rm -f #{temp_file}"
+    end
+    local_path
+  end
+
+
+  def ssh(command)
+    safe_run "ssh #{user}@#{host} #{command}"
+  end
+
+end
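To make the remote copy concrete, this rough sketch (not part of the gem) prints the shell commands Redis#take_dump_and_gzip ends up running; the user, host, and paths are placeholders, and the random suffix differs per run.

    user        = 'ubuntu'
    host        = 'redis.example.com'
    remote_path = '/mnt/redis/dump.rdb'
    local_path  = '/mnt/cryo/tmp-1234'
    temp_file   = remote_path + "-backup-42"

    # 1. copy the live dump file on the remote host so redis keeps writing to the original
    puts "ssh #{user}@#{host} cp #{remote_path} #{temp_file}"
    # 2. stream the copy back over ssh, gzipping locally as it arrives
    puts "(ssh #{user}@#{host} cat #{temp_file}) | gzip > #{local_path}"
    # 3. always remove the remote copy, even if the transfer failed (the ensure block)
    puts "ssh #{user}@#{host} rm -f #{temp_file}"
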
data/lib/cryo/message.rb
ADDED
@@ -0,0 +1,23 @@
+class Message
+
+  def initialize(opts)
+  end
+
+  def get()
+    raise "implement me"
+  end
+
+  def put()
+    raise "implement me"
+  end
+
+
+  class << self
+    def create(options={})
+      message_class = const_get(options[:type].to_s.capitalize)
+      return message_class.new(options)
+    end
+  end
+
+end
+
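The create class method is a small factory: it capitalizes the :type option and looks the class up with const_get, so adding a new transport only requires defining a class whose name matches the type string. A minimal sketch, assuming placeholder credentials and topic:

    # 'sns'.capitalize => 'Sns', so this resolves to the Sns subclass defined below.
    notifier = Message.create(type: 'sns',
                              aws_access_key: 'AKIA...',
                              aws_secret_key: 'secret',
                              topic: 'arn:aws:sns:us-east-1:000000000000:example')
    notifier.class  # => Sns
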
data/lib/cryo/message/sns.rb
ADDED
@@ -0,0 +1,19 @@
+class Sns < Message
+  require 'aws-sdk'
+
+  def initialize(opts={})
+    AWS.config(:access_key_id => opts[:aws_access_key],
+               :secret_access_key => opts[:aws_secret_key])
+    @sns = AWS::SNS::Client.new
+    @topic = opts[:topic] || opts[:topic_arn]
+  end
+
+
+  def send(opts={})
+    @sns.publish({
+      :message => opts[:message],
+      :subject => opts[:subject],
+      :topic_arn => @topic
+    })
+  end
+end
data/lib/cryo/store.rb
ADDED
@@ -0,0 +1,139 @@
+#require 'net/ntp'
+require 'logger'
+
+class Store
+
+  include Utils
+  attr_accessor :logger
+
+  def initialize(opts={})
+    self.logger = Logger.new(STDERR)
+    logger.level = Logger::DEBUG
+
+    @snapshot_frequency = opts[:snapshot_frequency]
+    @archive_frequency = opts[:archive_frequency]
+    @snapshot_period = opts[:snapshot_period]
+    @snapshot_prefix = opts[:snapshot_prefix]
+    @archive_prefix = opts[:archive_prefix]
+    @time = opts[:time]
+  end
+
+  def get()
+    raise "implement me"
+  end
+
+  def put()
+    raise "implement me"
+  end
+
+  def get_snapshot_list()
+    raise "implement me"
+  end
+
+  def get_archive_list()
+    raise "implement me"
+  end
+
+  class << self
+    def create(options={})
+      const_get(options[:type].to_s.capitalize).new(options)
+    end
+  end
+
+  def archive_and_purge
+    snapshot_list = get_snapshot_list
+    newest_archive = get_newest_archive
+    recursive_archive_and_purge(snapshot_list: snapshot_list, newest_archive: newest_archive)
+  end
+
+  protected
+
+  def get_newest_archive()
+    raise "implement me"
+  end
+
+
+  def recursive_archive_and_purge(opts={})
+    logger.debug "entering recursive_archive_and_purge"
+    snapshot_list = opts[:snapshot_list]
+
+    # return if there are no snapshots
+    if snapshot_list.empty?
+      logger.info "no snapshots found"
+      return true
+    end
+
+    # return if there are not enough snapshots available
+    minium_number_of_snapshots = (@snapshot_period.to_f/@snapshot_frequency.to_f).ceil
+    if snapshot_list.size < minium_number_of_snapshots
+      logger.info "not enough snapshots available for archiving"
+      logger.info "we found #{snapshot_list.size} but we need to keep at least #{minium_number_of_snapshots}"
+      return true
+    end
+
+    oldest_snapshot = snapshot_list.shift
+    oldest_snapshot_age = get_age_from_key_name(oldest_snapshot)
+
+    logger.debug "oldest_snapshot is #{oldest_snapshot}"
+    logger.debug "oldest_snapshot_age is #{oldest_snapshot_age}"
+
+    # return if the oldest snapshot is not old enough to be archived
+    if oldest_snapshot_age < @snapshot_period
+      logger.info "all snapshots are younger than snapshot_period"
+      return true
+    end
+
+    # if we got this far, then the oldest snapshot needs to be either archived or deleted
+    newest_archive = get_newest_archive
+
+    # check to see if we have any archives
+    if newest_archive.empty?
+      logger.info "looks like we don't have any archives yet"
+      logger.info "archiving oldest snapshot #{oldest_snapshot}"
+      archive_snapshot oldest_snapshot
+      logger.debug "recursing..."
+      recursive_archive_and_purge(snapshot_list: snapshot_list, newest_archive: oldest_snapshot)
+      return true
+    end
+
+
+    newest_archive_age = get_age_from_key_name(newest_archive)
+
+    # check to see if the oldest snapshot should be archived
+    if need_to_archive?(oldest_snapshot_age,newest_archive_age)
+      logger.info "archiving oldest snapshot #{oldest_snapshot}"
+      archive_snapshot oldest_snapshot
+      logger.debug "recursing..."
+      recursive_archive_and_purge(snapshot_list: snapshot_list, newest_archive: oldest_snapshot)
+      return true
+    end
+
+    # check the next oldest snapshot too, before we throw this one away
+    second_oldest_snapshot = opts[:snapshot_list].first
+    second_oldest_snapshot_age = get_age_from_key_name(second_oldest_snapshot)
+
+    if need_to_archive?(second_oldest_snapshot_age,newest_archive_age)
+      logger.info "archiving oldest snapshot #{oldest_snapshot}"
+      archive_snapshot oldest_snapshot
+      logger.debug "recursing..."
+      recursive_archive_and_purge(snapshot_list: snapshot_list, newest_archive: oldest_snapshot)
+      return true
+    end
+
+    # if we got this far, then we just need to delete the oldest snapshot
+    logger.info "deleting oldest snapshot #{oldest_snapshot}"
+    delete_snapshot oldest_snapshot
+    logger.debug "recursing"
+    recursive_archive_and_purge(snapshot_list: snapshot_list, newest_archive: newest_archive)
+    return true
+  end
+
+  def archive_snapshot
+    raise "implement me"
+  end
+
+  def delete
+    raise "implement me"
+  end
+
+end
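The purge logic keeps at least snapshot_period / snapshot_frequency snapshots around before anything is archived or deleted, and only archives a snapshot when the newest archive is sufficiently older than it. A worked example, using assumed settings (one snapshot per hour, a one-day snapshot period, archives at most once per day); this is illustration only, not gem code:

    snapshot_frequency = 60     # minutes between snapshots
    snapshot_period    = 1440   # minutes a snapshot is kept before archive/purge
    archive_frequency  = 1440   # minimum minutes between archived snapshots

    minimum_to_keep = (snapshot_period.to_f / snapshot_frequency.to_f).ceil
    # => 24, so purging only starts once more than 24 snapshots exist

    # a snapshot older than snapshot_period is archived when the newest archive is more
    # than archive_frequency minutes older than it, otherwise it is deleted
    oldest_snapshot_age = 1500  # minutes
    newest_archive_age  = 3000  # minutes
    need_to_archive = (newest_archive_age - oldest_snapshot_age) > archive_frequency
    # => true, because 3000 - 1500 = 1500 > 1440
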
data/lib/cryo/store/s3.rb
ADDED
@@ -0,0 +1,128 @@
+class S3 < Store
+  require 'aws-sdk'
+
+  attr_accessor :snapshot_bucket, :archive_bucket, :prefix
+
+  def initialize(opts={})
+    super(opts)
+    AWS.config(:access_key_id => opts[:aws_access_key],
+               :secret_access_key => opts[:aws_secret_key])
+    @s3 = AWS::S3.new
+    @snapshot_bucket = @s3.buckets[opts[:snapshot_bucket]]
+    @archive_bucket = @s3.buckets[opts[:archive_bucket]]
+  end
+
+  def get(opts={})
+    bucket = opts[:bucket]
+    key = opts[:key]
+    file_path = opts[:file] || opts[:path]
+    if file_path
+      File.open(file_path,'w') do |file|
+        @s3.buckets[bucket].objects[key].read {|chunk| file.write chunk}
+        return true
+      end
+    else
+      return @s3.buckets[bucket].objects[key].read
+    end
+  end
+
+
+  def put(opts={})
+    bucket = opts[:bucket]
+    key = opts[:key]
+    content = opts[:content]
+    @s3.buckets[bucket].objects[key].write(content) # TODO: verify that bucket exists?
+  end
+
+
+  def etag(opts={})
+    bucket = opts[:bucket]
+    key = opts[:key]
+    @s3.buckets[bucket].objects[key].etag
+  end
+
+
+  # return an array listing the objects in our snapshot bucket
+  def get_snapshot_list
+    get_bucket_listing(bucket: @snapshot_bucket, prefix: @prefix)
+  end
+
+
+  # return an array listing the objects in our archive bucket
+  def get_archive_list
+    get_bucket_listing(bucket: archive_bucket, prefix: @prefix)
+  end
+
+  # return an array listing of objects in a bucket
+  def get_bucket_listing(opts={})
+    bucket = opts[:bucket]
+    prefix = opts[:prefix]
+    list = []
+    @s3.buckets[bucket].objects.with_prefix(prefix).each do |object|
+      list << object
+    end
+    list
+  end
+
+  def get_snapshot_list
+    snapshot_list = []
+    @snapshot_bucket.objects.with_prefix(@snapshot_prefix).each do |object|
+      snapshot_list << trim_snapshot_name(object.key)
+    end
+    snapshot_list
+  end
+
+  protected
+
+  def expand_snapshot_name(shortname)
+    @snapshot_prefix + shortname + "Z.cryo"
+  end
+
+  def expand_archive_name(shortname)
+    @archive_prefix + shortname + "Z.cryo"
+  end
+
+  def trim_snapshot_name(longname)
+    longname.gsub(/^#{@snapshot_prefix}/,'').gsub(/Z\.cryo$/,'')
+  end
+
+  def trim_archive_name(longname)
+    return "" if longname.nil?
+    longname.gsub(/^#{@archive_prefix}/,'').gsub(/Z\.cryo$/,'')
+  end
+
+  def delete_snapshot(snapshot)
+    full_snapshot_name = expand_snapshot_name(snapshot)
+    @snapshot_bucket.objects[full_snapshot_name].delete
+  end
+
+  def archive_snapshot(snapshot)
+    logger.info "archiving snapshot #{snapshot}"
+    full_snapshot_name = expand_snapshot_name(snapshot)
+    full_archive_name = expand_archive_name(snapshot)
+    logger.debug "full_snapshot_name is #{full_snapshot_name}"
+    logger.debug "full_archive_name is #{full_archive_name}"
+    snapshot_object = @snapshot_bucket.objects[full_snapshot_name]
+    # if we have already copied the object, just delete the snapshot
+    if @archive_bucket.objects[full_archive_name].exists?
+      snapshot_object.delete
+    else
+      snapshot_object.move_to(full_archive_name, :bucket => @archive_bucket)
+    end
+  end
+
+
+  # this function returns the last item in a bucket that matches the given prefix
+  def get_newest_archive(prefix=@archive_prefix)
+    tree = @archive_bucket.objects.with_prefix(prefix).as_tree
+    directories = tree.children.select(&:branch?).collect(&:prefix)
+    if directories.empty?
+      matches = []
+      @archive_bucket.objects.with_prefix(prefix).each {|o| matches << o.key}
+      return trim_archive_name(matches.last)
+    else
+      # recurse
+      get_newest_archive(directories.last)
+    end
+  end
+end
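Snapshot object keys are just the prefix plus a UTC timestamp plus a "Z.cryo" suffix, and expand_snapshot_name / trim_snapshot_name are inverses of each other. A small illustration with an assumed prefix (not part of the gem):

    snapshot_prefix = 'redis/'
    timestamp       = '2013/04/02/12:30:00'   # produced by get_utc_timestamp

    full_key  = snapshot_prefix + timestamp + 'Z.cryo'
    # => "redis/2013/04/02/12:30:00Z.cryo"   (the object key in the snapshot bucket)

    short_key = full_key.gsub(/^#{snapshot_prefix}/, '').gsub(/Z\.cryo$/, '')
    # => "2013/04/02/12:30:00"               (what recursive_archive_and_purge works with)
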
data/lib/cryo/utils.rb
ADDED
@@ -0,0 +1,117 @@
+module Utils
+
+  require 'zlib'
+  require 'net/ntp'
+  require 'fileutils'
+
+
+  def delete_file(path)
+    File.delete(path) if File.exists?(path)
+  end
+
+
+  def get_tempfile
+    # Tempfile.new('redis-backup','/mnt/cryo').path
+    tmp_file = File.join(@tmp_path,"tmp-#{rand 9999}")
+    at_exit {delete_file tmp_file}
+    FileUtils.touch tmp_file
+    tmp_file
+  end
+
+
+  def ungzip_file(path)
+    # get a temp file
+    tempfile = get_tempfile
+    #logger.info "unzipping #{path} to #{tempfile}..."
+
+    # stream the gzipped file into an uncompressed file
+    Zlib::GzipReader.open(path) do |gz|
+      File.open(tempfile,'w') do |open_file|
+        # write 1M chunks at a time
+        open_file.write gz.read(1024*1024) until gz.eof?
+      end
+    end
+    #logger.info "finished unzipping file"
+
+    # return unzipped file
+    tempfile
+  end
+
+
+  def gzip_file(path)
+    # given a path to a file, return a gzipped version of it
+    tempfile = get_tempfile
+    #logger.info "gzipping #{path} to #{tempfile}"
+
+    # stream the gzipped content into a file as we compute it
+    Zlib::GzipWriter.open(tempfile) do |gz|
+      File.open(path) do |f|
+        # write 1M chunks at a time
+        gz.write f.read(1024*1024) until f.eof?
+      end
+    end
+    #logger.info "done gzipping"
+    tempfile
+  end
+
+  def safe_run(command)
+    #logger.debug "about to run #{command}"
+    output = `bash -c "set -o pipefail && #{command}"`.chomp
+    raise "command '#{command}' failed!\nOutput was:\n#{output}" unless $?.success?
+    true
+  end
+
+  def verify_system_dependency(command)
+    raise "system dependency #{command} is not installed" unless system "which #{command} > /dev/null"
+  end
+
+  def get_utc_time
+    retries = 5
+    begin
+      Net::NTP.get("us.pool.ntp.org").time.getutc
+    rescue Object => o
+      retries -= 1
+      if retries > 0
+        logger.debug "retrying ntp query again..."
+        sleep 2
+        retry
+      end
+      raise o
+    end
+  end
+
+  def get_utc_timestamp()
+    @time ||= get_utc_time # don't change the endpoint!!!
+    @timestamp ||= @time.strftime("%Y/%m/%d/%H:%M:%S")
+  end
+
+  def get_timstamped_key_name()
+    "#{@snapshot_prefix}#{@timestamp}Z.cryo"
+  end
+
+  def get_utc_time_from_key_name(key_name)
+    logger.debug "getting time for #{key_name}"
+    year,month,day,time = key_name.split('/')
+    hour,min,sec = time.split(':')
+    Time.utc(year,month,day,hour,min,sec)
+  end
+
+  # returns the age of the snapshot in mins
+  def get_age_from_key_name(key_name)
+    snapshot_time = get_utc_time_from_key_name(key_name)
+    age_in_mins_as_float = (@time - snapshot_time) / 60
+    age_in_mins_as_int = age_in_mins_as_float.to_i
+  end
+
+  # find out if we have an archive that is more recent than the snapshot period
+  def need_to_archive?(old_snapshot_age,new_archive_age)
+    logger.debug "checking to see if we should archive"
+    logger.debug "oldest snapshot age is #{old_snapshot_age}"
+    logger.debug "newest archive time is #{new_archive_age}"
+    logger.debug "@archive_frequency is #{@archive_frequency}"
+    answer = (new_archive_age - old_snapshot_age) > @archive_frequency
+    logger.debug "returning #{answer.inspect}"
+    return answer
+  end
+
+end
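The trimmed key doubles as the snapshot's timestamp, which is how get_age_from_key_name computes ages with no extra metadata. A quick sketch of the parsing and the age arithmetic, assuming a backup run whose NTP-derived start time is 14:00 UTC the same day (illustration only, not gem code):

    key_name = '2013/04/02/12:30:00'

    year, month, day, time = key_name.split('/')
    hour, min, sec         = time.split(':')
    snapshot_time = Time.utc(*[year, month, day, hour, min, sec].map(&:to_i))
    # => 2013-04-02 12:30:00 UTC

    now = Time.utc(2013, 4, 2, 14, 0, 0)   # stand-in for the NTP-sourced @time
    age_in_mins = ((now - snapshot_time) / 60).to_i
    # => 90
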
data/lib/cryo/version.rb
CHANGED
@@ -1,3 +1,3 @@
-
-VERSION = "0.0.
+class Cryo
+  VERSION = "0.0.2"
 end
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: cryo
 version: !ruby/object:Gem::Version
-  version: 0.0.
+  version: 0.0.2
 prerelease:
 platform: ruby
 authors:
@@ -9,22 +9,118 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date:
-dependencies:
+date: 2013-04-02 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  name: colorize
+  requirement: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: trollop
+  requirement: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: '2.0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: '2.0'
+- !ruby/object:Gem::Dependency
+  name: aws-sdk
+  requirement: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: '1.6'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: '1.6'
+- !ruby/object:Gem::Dependency
+  name: net-ntp
+  requirement: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: 2.1.1
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: 2.1.1
+- !ruby/object:Gem::Dependency
+  name: pry
+  requirement: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
 description:
 email:
 - nathan.baxter@airbnb.com
-executables:
+executables:
+- cryo
 extensions: []
 extra_rdoc_files: []
 files:
 - .gitignore
 - Gemfile
 - LICENSE
+- Makefile
 - README.md
 - Rakefile
+- bin/bootstrap_cryo.sh
+- bin/cryo
+- bin/test
+- bin/test2
 - cryo.gemspec
+- examples/rds.sh
 - lib/cryo.rb
+- lib/cryo/database.rb
+- lib/cryo/database/mysql.rb
+- lib/cryo/database/postgres.rb
+- lib/cryo/database/redis.rb
+- lib/cryo/message.rb
+- lib/cryo/message/sns.rb
+- lib/cryo/store.rb
+- lib/cryo/store/s3.rb
+- lib/cryo/utils.rb
 - lib/cryo/version.rb
 homepage: https://github.com/airbnb/cryo
 licenses: []
@@ -49,5 +145,6 @@ rubyforge_project:
 rubygems_version: 1.8.24
 signing_key:
 specification_version: 3
-summary: Tool for snapshotting data, backing it up, verifying it,
+summary: Tool for snapshotting data, backing it up, verifying it, cycling it, and
+  triggering notifications.
 test_files: []