s3backup 0.5.1
- data/History.txt +4 -0
- data/Manifest.txt +23 -0
- data/PostInstall.txt +7 -0
- data/README.rdoc +77 -0
- data/Rakefile +26 -0
- data/backup.yml +8 -0
- data/bin/s3backup +10 -0
- data/lib/s3backup/backup.rb +76 -0
- data/lib/s3backup/cli.rb +69 -0
- data/lib/s3backup/crypt.rb +22 -0
- data/lib/s3backup/restore.rb +98 -0
- data/lib/s3backup/s3log.rb +30 -0
- data/lib/s3backup/s3wrapper.rb +215 -0
- data/lib/s3backup/tree_info.rb +116 -0
- data/lib/s3backup.rb +6 -0
- data/script/console +10 -0
- data/script/destroy +14 -0
- data/script/generate +14 -0
- data/spec/s3backup_cli_spec.rb +15 -0
- data/spec/s3backup_spec.rb +11 -0
- data/spec/spec.opts +1 -0
- data/spec/spec_helper.rb +10 -0
- data/tasks/rspec.rake +21 -0
- metadata +100 -0
data/History.txt
ADDED
data/Manifest.txt
ADDED
@@ -0,0 +1,23 @@
+History.txt
+Manifest.txt
+PostInstall.txt
+README.rdoc
+Rakefile
+backup.yml
+bin/s3backup
+lib/s3backup.rb
+lib/s3backup/backup.rb
+lib/s3backup/cli.rb
+lib/s3backup/crypt.rb
+lib/s3backup/restore.rb
+lib/s3backup/s3log.rb
+lib/s3backup/s3wrapper.rb
+lib/s3backup/tree_info.rb
+script/console
+script/destroy
+script/generate
+spec/s3backup_cli_spec.rb
+spec/s3backup_spec.rb
+spec/spec.opts
+spec/spec_helper.rb
+tasks/rspec.rake
data/PostInstall.txt
ADDED
data/README.rdoc
ADDED
@@ -0,0 +1,77 @@
+= s3backup
+
+* http://rubyforge.org/projects/s3backup/
+
+== DESCRIPTION:
+S3Backup is a tool that backs up local directories to Amazon S3.
+
+== FEATURES/PROBLEMS:
+S3Backup is a backup/restore tool. It uploads local directories to Amazon S3 in compressed form.
+Directories that have not changed since the previous backup are not uploaded again.
+Uploaded files can be encrypted when a password and salt are configured.
+
+== SYNOPSIS:
+
+To use s3backup, prepare a backup configuration in YAML such as the one below.
+------------------------------------------------------------
+bucket: "bucket name"
+directories:
+ - "absolute path to directory for backup/restore"
+ - "absolute path to directory for backup/restore"
+access_key_id: 'Amazon access_key_id'
+secret_access_key: 'Amazon secret_access_key'
+password: 'password for AES (optional)'
+salt: 'hex string, 16 characters (required when password is specified)'
+------------------------------------------------------------
+
+Commands:
+
+For backup:
+ s3backup [-f configuration file] [-v verbose message] [-l path for log] [-h help]
+
+ configuration file  path to a file with the contents above; default is ./backup.yml
+ verbose             log the directory tree and the difference from the previous backup
+ path for log        default is standard output
+ help                print a help message
+
+For restore:
+ s3backup -r [-f configuration file] [-v verbose message] [-l path for log] [-o output dir] [-h help]
+
+ configuration file  path to a file with the contents above; default is ./backup.yml
+ verbose             log the directory tree and the difference from the previous backup
+ path for log        default is standard output
+ output dir          directory into which backups are restored; default is ./
+ help                print a help message
+
+== REQUIREMENTS:
+
+* aws-s3
+
+== INSTALL:
+
+* gem install s3backup (may need sudo privileges)
+
+== LICENSE:
+
+(The MIT License)
+
+Copyright (c) 2009 Takeshi Morita
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
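
As a minimal sketch (not part of the package), the same backup and restore flows can be driven from Ruby, mirroring what bin/s3backup does via S3backup::CLI; the config path is a placeholder:

  require 'yaml'
  require 's3backup/backup'
  require 's3backup/restore'

  config = YAML.load_file('./backup.yml')         # configuration as described above
  S3backup::Backup.new(config).start              # upload new/changed directories
  S3backup::Restore.new('.', config).start        # restore into the current directory
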
data/Rakefile
ADDED
@@ -0,0 +1,26 @@
+require 'rubygems'
+gem 'hoe', '>= 2.1.0'
+require 'hoe'
+require 'fileutils'
+require './lib/s3backup'
+
+Hoe.plugin :newgem
+# Hoe.plugin :website
+# Hoe.plugin :cucumberfeatures
+
+# Generate all the Rake tasks
+# Run 'rake -T' to see list of generated tasks (from gem root directory)
+$hoe = Hoe.spec 's3backup' do
+  self.developer 'Takeshi Morita', 'morita@ibrains.co.jp'
+  self.post_install_message = 'PostInstall.txt' # TODO remove if post-install message not required
+  self.rubyforge_name       = self.name # TODO this is default value
+  self.extra_deps           = [['aws-s3','>= 0.6.2']]
+
+end
+
+require 'newgem/tasks'
+Dir['tasks/**/*.rake'].each { |t| load t }
+
+# TODO - want other tests/tasks run by default? Add them to the list
+# remove_task :default
+# task :default => [:spec, :features]
data/backup.yml
ADDED
@@ -0,0 +1,8 @@
+bucket: "bucket name"
+directories:
+ - "absolute path to directory for backup/restore"
+ - "absolute path to directory for backup/restore"
+access_key_id: 'Amazon access_key_id'
+secret_access_key: 'Amazon secret_access_key'
+password: 'password for AES (optional)'
+salt: 'hex string, 16 characters (required when password is specified)'
data/bin/s3backup
ADDED
data/lib/s3backup/backup.rb
ADDED
@@ -0,0 +1,76 @@
+require 'cgi'
+require 'date'
+require 's3backup/s3wrapper'
+require 's3backup/s3log'
+require 's3backup/tree_info'
+module S3backup
+  class Backup
+    def initialize(config)
+      check_config(config)
+      directories = config["directories"]
+      # Strip any trailing slash from the target directories.
+      @directories = directories.map{|d| d =~ /\/$/ ? d.chop : d}
+      begin
+        @s3_obj = S3Wrapper.new(config)
+      rescue => err
+        S3log.error(err.backtrace.join("\n")+"\n"+err.message)
+        exit -1
+      end
+      @target_infos = {:attributes => [], :tree_infos => []}
+      @directories.each do |dir|
+        @target_infos[:tree_infos].push(TreeInfo.new(dir))
+        S3log.debug("tree_info=#{@target_infos[:tree_infos].inspect}")
+        @target_infos[:attributes].push({:base => dir, :update_date => Date.today})
+      end
+    end
+    def check_config(config)
+      unless config["directories"]
+        S3log.error("directories doesn't exist in the config file.")
+        exit -1
+      end
+      unless config["directories"].class == Array
+        dir = config["directories"]
+        config["directories"] = Array.new
+        config["directories"].push dir
+      end
+      config["directories"].each do |dir|
+        unless File.directory? dir
+          S3log.error("#{dir} doesn't exist.")
+          exit -1
+        end
+        if File.expand_path(dir) != dir
+          S3log.error("#{dir} must be an absolute path.")
+          exit -1
+        end
+      end
+    end
+    def start
+      begin
+        first_flg = false
+        unless @s3_obj.bucket_exist?
+          first_flg = true
+          @s3_obj.create_bucket
+        end
+        @target_infos[:attributes].each_with_index do |attribute,i|
+          base = CGI.escape(attribute[:base])
+          tree_file_name = "tree_"+base+".yml"
+          tree_data = nil
+          unless first_flg
+            # Fetch the previous file tree from AWS S3.
+            tree_data = @s3_obj.get(tree_file_name)
+          end
+          # Compare the previous file tree with the current one.
+          diff_info = @target_infos[:tree_infos][i].diff(TreeInfo.new(tree_data))
+          S3log.debug("diff_info=#{diff_info.inspect}")
+          # Upload added or modified directories to AWS S3.
+          @s3_obj.store_directories(diff_info[:directory][:add] + diff_info[:directory][:modify])
+          # Delete removed directories from AWS S3.
+          @s3_obj.delete_directories(diff_info[:directory][:remove])
+          # Store the current file tree in AWS S3.
+          @s3_obj.post(tree_file_name,@target_infos[:tree_infos][i].dump_yaml)
+        end
+      rescue => err
+        S3log.error(err.backtrace.join("\n")+"\n"+err.message)
+        exit -1
+      end
+    end
+  end
+end
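
Both Backup and Restore locate a directory's stored snapshot by the same key convention: "tree_" plus the CGI-escaped absolute path. A small illustrative sketch (the path is hypothetical):

  require 'cgi'

  dir = '/home/user/docs'
  tree_file_name = "tree_" + CGI.escape(dir) + ".yml"
  # => "tree_%2Fhome%2Fuser%2Fdocs.yml"
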
data/lib/s3backup/cli.rb
ADDED
@@ -0,0 +1,69 @@
+require 'optparse'
+require 'yaml'
+require 's3backup/s3log'
+
+module S3backup
+  class CLI
+    DEFAULT_CONFIG = './backup.yml'
+    def self.execute(stdout, arguments=[])
+
+      options = {
+        :restore     => false,
+        :config_file => DEFAULT_CONFIG,
+        :verbose     => false,
+        :log         => nil,
+        :output_dir  => '.'
+      }
+      begin
+        parser = OptionParser.new do |opt|
+          opt.banner = "Usage: #{File.basename($0)} [Option]"
+          opt.on("-r","--restore","restore backup.") {
+            options[:restore] = true
+          }
+          opt.on("-f","--file config",String,"location of config file. default: #{DEFAULT_CONFIG}") {|o|
+            options[:config_file] = o
+          }
+          opt.on("-o","--output directory",String,"restore location. default: current directory.") {|o|
+            options[:output_dir] = o
+          }
+          opt.on("-v","--verbose","verbose message to log file"){
+            options[:verbose] = true
+          }
+          opt.on("-l","--log path",String,"path to log file"){|o|
+            options[:log] = o
+          }
+          opt.on("-h","--help","print this message and quit") {
+            puts opt.help
+            exit 0
+          }
+          opt.parse!(arguments)
+        end
+      rescue OptionParser::ParseError => err
+        S3log.error(err.message)
+        exit 1
+      end
+      S3log.set_debug(options[:verbose])
+      if !File.file?(options[:config_file])
+        S3log.error("config #{options[:config_file]} doesn't exist.")
+        exit 1
+      end
+      if options[:log]
+        S3log.set_logfile(File.open(options[:log],"a"))
+      end
+      if options[:restore]
+        require 's3backup/restore'
+        if !File.directory?(options[:output_dir])
+          S3log.error("output directory #{options[:output_dir]} doesn't exist.")
+          exit 1
+        end
+        rt = Restore.new(options[:output_dir],YAML.load_file(options[:config_file]))
+        rt.start
+      else
+        require 's3backup/backup'
+        bk = Backup.new(YAML.load_file(options[:config_file]))
+        bk.start
+      end
+    end
+  end
+end
data/lib/s3backup/crypt.rb
ADDED
@@ -0,0 +1,22 @@
+require 'openssl'
+module S3backup
+  class Crypt
+    CIPHER_ALGORITHM = "aes-256-cbc"
+    def initialize(password,salt)
+      @password = password
+      # Convert the 16-character hex string into an 8-byte binary salt.
+      @salt = salt.scan(/../).map{|i| i.hex}.pack("c*")
+    end
+    def encrypt(data)
+      enc = OpenSSL::Cipher::Cipher.new(CIPHER_ALGORITHM)
+      enc.encrypt
+      enc.pkcs5_keyivgen(@password,@salt)
+      enc.update(data)+enc.final
+    end
+    def decrypt(data)
+      enc = OpenSSL::Cipher::Cipher.new(CIPHER_ALGORITHM)
+      enc.decrypt
+      enc.pkcs5_keyivgen(@password,@salt)
+      enc.update(data)+enc.final
+    end
+  end
+end
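
Crypt wraps OpenSSL AES-256-CBC, deriving key and IV from the password and salt with pkcs5_keyivgen. A minimal round-trip sketch (password and salt are placeholders; the salt must be a 16-character hex string):

  require 's3backup/crypt'

  crypt = S3backup::Crypt.new('secret', '0123456789abcdef')
  ciphertext = crypt.encrypt('hello world')
  crypt.decrypt(ciphertext)   # => "hello world"
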
data/lib/s3backup/restore.rb
ADDED
@@ -0,0 +1,98 @@
+require 'cgi'
+require 's3backup/s3wrapper'
+require 's3backup/s3log'
+require 's3backup/tree_info'
+require 'fileutils'
+module S3backup
+  class Restore
+    def initialize(output_dir,config)
+      check_config(config)
+      @output_dir = output_dir
+      @directories = config["directories"]
+      begin
+        @s3_obj = S3Wrapper.new(config)
+      rescue => err
+        S3log.error(err.backtrace.join("\n")+"\n"+err.message)
+        exit -1
+      end
+      @target_infos = {:attributes => [], :tree_infos => []}
+    end
+    def check_config(config)
+      if config["directories"]
+        if config["directories"].class != Array
+          dir = config["directories"]
+          config["directories"] = Array.new
+          config["directories"].push dir
+        end
+        config["directories"] = config["directories"].map{|d| d =~ /\/$/ ? d.chop : d}
+      end
+    end
+    def get_tree(dir)
+      base_dir = dir
+      tree_data = nil
+      # Walk up the directory hierarchy until a stored tree file is found.
+      while 1
+        base = CGI.escape(base_dir)
+        tree_file_name = "tree_"+base+".yml"
+        tree_data = @s3_obj.get(tree_file_name)
+        if tree_data or base_dir == "/"
+          break
+        end
+        base_dir = File.dirname(base_dir)
+      end
+      unless tree_data
+        return nil
+      end
+      tree_info = TreeInfo.new(tree_data)
+      return tree_info.hierarchie(dir)
+    end
+    def get_bases
+      files = @s3_obj.find(/^tree_.*\.yml/)
+      dirs = files.map do |d|
+        m = /tree_(.*)\.yml/.match(d)
+        next nil unless m
+        CGI.unescape(m[1])
+      end
+      return dirs.compact
+    end
+    def expand_tree(tree)
+      top = tree[0].keys[0]
+      top_dir = File.dirname(top)
+      tmp_dir = CGI.escape(top_dir)
+      output_dir = @output_dir+"/"+tmp_dir
+      FileUtils.mkdir_p(output_dir)
+      tree.each do |node|
+        @s3_obj.get_directories(node.keys,top_dir,output_dir)
+      end
+      top_dir_len = top_dir.length
+      # Restore timestamps from the deepest level upwards.
+      (tree.length - 1).downto(0){|n|
+        tree[n].each do |k,v|
+          dir_len = k.length
+          relative_path = k.slice(top_dir_len,dir_len - top_dir_len)
+          dir = output_dir + relative_path
+          File.utime(v[:atime],v[:mtime],dir)
+        end
+      }
+    end
+    def start
+      begin
+        unless @s3_obj.bucket_exist?
+          S3log.error("bucket: #{@s3_obj.bucket} isn't found!")
+          exit -1
+        end
+        @directories = get_bases unless @directories
+        @directories.each do |dir|
+          tree = get_tree(dir)
+          unless tree
+            S3log.warn("#{dir} wasn't found in AWS S3. Ignoring.")
+            next
+          end
+          S3log.debug(tree.inspect)
+          expand_tree(tree)
+        end
+      rescue => err
+        S3log.error(err.backtrace.join("\n")+"\n"+err.message)
+        exit -1
+      end
+    end
+  end
+end
data/lib/s3backup/s3log.rb
ADDED
@@ -0,0 +1,30 @@
+require 'logger'
+module S3backup
+  class S3log
+    @@log_file = nil
+    @@debug = false
+    def S3log.get_logger
+      @@log_file ? @@log_file : Logger.new($stderr)
+    end
+    def S3log.set_debug(flg)
+      @@debug = flg
+    end
+    def S3log.error(str)
+      get_logger.error(str)
+    end
+    def S3log.info(str)
+      get_logger.info(str)
+    end
+    def S3log.warn(str)
+      get_logger.warn(str)
+    end
+    def S3log.debug(str)
+      if @@debug
+        get_logger.debug(str)
+      end
+    end
+    def S3log.set_logfile(f)
+      @@log_file = Logger.new(f)
+    end
+  end
+end
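
S3log is a thin class-level facade over Logger: messages go to standard error unless a log file has been set, and debug output is gated by set_debug. For example (the log file name is a placeholder):

  require 's3backup/s3log'

  S3backup::S3log.set_logfile(File.open('s3backup.log', 'a'))  # optional; default is $stderr
  S3backup::S3log.set_debug(true)                              # enable S3log.debug output
  S3backup::S3log.info('backup started')
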
data/lib/s3backup/s3wrapper.rb
ADDED
@@ -0,0 +1,215 @@
+require 'cgi'
+require 'aws/s3'
+require 'tempfile'
+require 's3backup/crypt'
+module S3backup
+  class S3Wrapper
+    BUF_READ_SIZE = 1024*1024*128
+    attr_reader :bucket
+    def initialize(config)
+      # Copy the configuration into member variables.
+      set_config(config)
+      args = {
+        :access_key_id     => @access_key_id,
+        :secret_access_key => @secret_access_key
+      }
+      if @proxy.size != 0
+        args[:proxy] = @proxy
+      end
+      # Connect to AWS S3.
+      AWS::S3::Base.establish_connection!(args)
+    end
+    def bucket_exist?()
+      # Fetch all buckets on AWS S3 and look for the configured one.
+      buckets = AWS::S3::Service.buckets
+      buckets.each do |bucket|
+        if bucket.name == @bucket
+          return true
+        end
+      end
+      return false
+    end
+    def create_bucket()
+      S3log.info("Bucket.create(#{@bucket})")
+      ret = AWS::S3::Bucket.create(@bucket)
+      unless ret
+        raise "AWS::S3::Bucket create error"
+      end
+    end
+    def get(key)
+      key_name = CGI.escape(key)
+      data = nil
+      if AWS::S3::S3Object.exists? key_name,@bucket
+        data = AWS::S3::S3Object.value(key_name,@bucket)
+        if @aes
+          data = @aes.decrypt(data)
+        end
+      end
+      return data
+    end
+    def store_directories(dirs)
+      dirs.each do |dir|
+        store_directory(dir)
+      end
+    end
+    def get_directory(dir,out_dir)
+      data = get_chain(dir)
+      tmp = Tempfile.open("s3backup")
+      tmp.write(data)
+      tmp.close
+      # Unpack the tgz file into out_dir.
+      from_tgz(tmp.path,out_dir)
+      tmp.close(true)
+    end
+    def get_directories(dirs,prefix,output_dir)
+      prefix_len = prefix.length
+      dirs.each do |dir|
+        parent = File.dirname(dir)
+        p_len = parent.length
+        relative_path = parent.slice(prefix_len,p_len - prefix_len)
+        cur_dir = output_dir + relative_path
+        get_directory(dir,cur_dir)
+      end
+    end
+    def delete_directories(dirs)
+      dirs.each do |dir|
+        delete_chain(dir)
+      end
+    end
+    def post(key,val)
+      if val.length > BUF_READ_SIZE
+        raise "max size = #{BUF_READ_SIZE} but size = #{val.size}"
+      end
+      key_name = CGI.escape(key)
+      if @aes
+        val = @aes.encrypt(val)
+      end
+      AWS::S3::S3Object.store(key_name,val,@bucket)
+      S3log.info("S3Object.store(#{key_name})")
+    end
+    def set_config(config)
+      err_msg = ""
+      unless config["access_key_id"]
+        err_msg += "access_key_id doesn't exist in config file.\n"
+      end
+      @access_key_id = config["access_key_id"]
+      unless config["secret_access_key"]
+        err_msg += "secret_access_key doesn't exist in config file.\n"
+      end
+      @secret_access_key = config["secret_access_key"]
+      unless config["bucket"]
+        err_msg += "bucket doesn't exist in config file.\n"
+      end
+      @proxy = {}
+      @proxy[:host] = config["proxy_host"] if config["proxy_host"]
+      @proxy[:port] = config["proxy_port"] if config["proxy_port"]
+      @proxy[:user] = config["proxy_user"] if config["proxy_user"]
+      @proxy[:password] = config["proxy_password"] if config["proxy_password"]
+      @bucket = config["bucket"]
+      if config["password"] and config["password"] != ""
+        if config["salt"]
+          if config["salt"] =~ /[0-9A-Fa-f]{16}/
+            @aes = Crypt.new(config["password"],config["salt"])
+          else
+            err_msg += "salt should be a hex string of length 16.\n"
+          end
+        else
+          err_msg += "salt doesn't exist in config file.\n"
+        end
+      end
+      if err_msg != ""
+        raise err_msg
+      end
+    end
+    # Compress the given directory as tar+gzip.
+    def to_tgz(path,dir)
+      # Exclude subdirectories; they are archived separately.
+      sub_dir = []
+      Dir.foreach(dir) do |file|
+        next if /^\.+$/ =~ file
+        sub_dir.push(file) if File.directory?(dir+"/"+file)
+      end
+      exclude = ""
+      exclude = exclude + " --exclude=" + sub_dir.join(" --exclude=") if sub_dir.length != 0
+      cmd = "(cd #{File.dirname(dir)};tar -czvf #{path} #{exclude} #{File.basename(dir)} > /dev/null 2>&1)"
+      S3log.info(cmd)
+      system(cmd)
+      unless $?.success?
+        raise "failed to execute #{cmd}. #{$?.inspect}"
+      end
+    end
+    def from_tgz(path,dir)
+      cmd = "tar -xzvf #{path} -C #{dir} > /dev/null 2>&1"
+      S3log.info(cmd)
+      system(cmd)
+      unless $?.success?
+        raise "failed to execute #{cmd}. #{$?.inspect}"
+      end
+    end
+    def delete(key)
+      if @aes
+        key_name = CGI.escape(@aes.encrypt(key))
+      else
+        key_name = CGI.escape(key)
+      end
+      if AWS::S3::S3Object.exists? key_name,@bucket
+        S3log.info("S3Object.delete(#{key_name})")
+        AWS::S3::S3Object.delete(key_name,@bucket)
+        return true
+      end
+      return false
+    end
+    def delete_chain(key)
+      i = 1
+      while delete(i.to_s + "_" + key)
+        i += 1
+      end
+    end
+    def post_chain(key,f)
+      i = 1
+      begin
+        while 1
+          key_name = i.to_s()+"_"+key
+          if @aes
+            key_name = @aes.encrypt(key_name)
+          end
+          post(key_name,f.readpartial(BUF_READ_SIZE))
+          i += 1
+        end
+      rescue EOFError
+      end
+    end
+    def get_chain(key)
+      data = nil
+      data_set = nil
+      i = 1
+      while 1
+        key_name = i.to_s()+"_"+key
+        if @aes
+          key_name = @aes.encrypt(key_name)
+        end
+        data = get(key_name)
+        if data == nil
+          break
+        end
+        if i == 1
+          data_set = ''
+        end
+        data_set += data
+        i += 1
+      end
+      return data_set
+    end
+    def store_directory(dir)
+      tmp = Tempfile.open("s3backup")
+      tmp.close
+      # Create the tgz file at tmp.path.
+      to_tgz(tmp.path,dir)
+      # Store the compressed data in S3, keyed by the directory's absolute path.
+      post_chain(dir,File.open(tmp.path,"r"))
+      tmp.close(true)
+    end
+  end
+end
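
Since post rejects payloads larger than BUF_READ_SIZE, post_chain splits each archive across numbered keys ("1_" + key, "2_" + key, ...) and get_chain concatenates them back until a key is missing; delete_chain removes them the same way. A sketch of the key sequence for one directory (the path is illustrative; each key is additionally CGI-escaped, and encrypted when AES is configured):

  key = '/home/user/docs'   # the directory's absolute path
  (1..3).map { |i| i.to_s + "_" + key }
  # => ["1_/home/user/docs", "2_/home/user/docs", "3_/home/user/docs"]
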
data/lib/s3backup/tree_info.rb
ADDED
@@ -0,0 +1,116 @@
+require 'yaml'
+module S3backup
+  class TreeInfo
+    attr_reader :fileMap
+    def initialize(target)
+      if target.nil?
+        @fileMap = {:file => Hash.new, :symlink => Hash.new, :directory => Hash.new}
+      elsif File.directory?(target)
+        @fileMap = {:file => Hash.new, :symlink => Hash.new, :directory => Hash.new}
+        stat = File.stat(target)
+        @fileMap[:directory][target] = {:mtime => stat.mtime, :atime => stat.atime}
+        makeFileMap(target)
+      elsif File.file?(target)
+        load_yaml(File.read(target))
+      else
+        load_yaml(target)
+      end
+    end
+    def makeFileMap(dir)
+      Dir.entries(dir).each do |e|
+        if e == "." or e == ".."
+          next
+        end
+        name = dir + "/" + e
+        if File.directory?(name)
+          stat = File.stat(name)
+          @fileMap[:directory][name] = {:mtime => stat.mtime, :atime => stat.atime}
+          makeFileMap(name)
+        elsif File.symlink?(name)
+          @fileMap[:symlink][name] = {:source => File.readlink(name)}
+        else
+          stat = File.stat(name)
+          @fileMap[:file][name] = {:size => stat.size, :date => stat.mtime}
+        end
+      end
+    end
+    def load_yaml(data)
+      @fileMap = YAML.load(data)
+    end
+    def dump_yaml()
+      YAML.dump(@fileMap)
+    end
+    # Group the directories under dir by depth: tree[0] holds dir itself,
+    # tree[1] its children, and so on.
+    def hierarchie(dir)
+      count = dir.count("/")
+      tree = []
+      @fileMap[:directory].each do |k,v|
+        if k.index(dir) != 0
+          next
+        end
+        level = k.count("/") - count
+        tree[level] = {} unless tree[level]
+        tree[level][k] = v
+      end
+      return tree
+    end
+    def diff(target)
+      modify_dir_map = {}
+      modify_files = []
+      modify_links = []
+
+      remove_dirs = target.fileMap[:directory].keys - @fileMap[:directory].keys
+      add_dirs = @fileMap[:directory].keys - target.fileMap[:directory].keys
+
+      new_info = @fileMap[:file]
+      old_info = target.fileMap[:file]
+
+      remove_files = old_info.keys - new_info.keys
+      remove_files.each do |f|
+        dir = File.dirname(f)
+        modify_dir_map[dir] = true
+      end
+      add_files = new_info.keys - old_info.keys
+      add_files.each do |f|
+        dir = File.dirname(f)
+        modify_dir_map[dir] = true
+      end
+
+      new_info.each do |k,v|
+        next unless old_info[k]
+        if old_info[k][:date] != v[:date] or old_info[k][:size] != v[:size]
+          modify_files.push(k)
+          dir = File.dirname(k)
+          modify_dir_map[dir] = true
+        end
+      end
+
+      new_info = @fileMap[:symlink]
+      old_info = target.fileMap[:symlink]
+
+      remove_links = old_info.keys - new_info.keys
+      remove_links.each do |f|
+        dir = File.dirname(f)
+        modify_dir_map[dir] = true
+      end
+
+      add_links = new_info.keys - old_info.keys
+      add_links.each do |f|
+        dir = File.dirname(f)
+        modify_dir_map[dir] = true
+      end
+
+      new_info.each do |k,v|
+        next unless old_info[k]
+        if old_info[k][:source] != v[:source]
+          modify_links.push(k)
+          dir = File.dirname(k)
+          modify_dir_map[dir] = true
+        end
+      end
+      return {
+        :directory => {:add => add_dirs, :modify => modify_dir_map.keys - add_dirs - remove_dirs, :remove => remove_dirs},
+        :file => {:add => add_files, :modify => modify_files, :remove => remove_files},
+        :symlink => {:add => add_links, :modify => modify_links, :remove => remove_links}}
+    end
+  end
+end
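
TreeInfo#diff compares two snapshots and reports additions, modifications, and removals per kind; Backup then re-uploads only the directories named under :directory. A minimal sketch against an empty snapshot (the scanned path is a placeholder):

  require 's3backup/tree_info'

  current  = S3backup::TreeInfo.new('/home/user/docs')  # scan a live directory
  previous = S3backup::TreeInfo.new(nil)                # empty snapshot
  diff = current.diff(previous)
  diff[:directory][:add]   # on a first backup: every directory
  diff[:file][:modify]     # files whose size or mtime changed
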
data/lib/s3backup.rb
ADDED
data/script/console
ADDED
@@ -0,0 +1,10 @@
+#!/usr/bin/env ruby
+# File: script/console
+irb = RUBY_PLATFORM =~ /(?:mswin|mingw)/ ? 'irb.bat' : 'irb'
+
+libs =  " -r irb/completion"
+# Perhaps use a console_lib to store any extra methods I may want available in the console
+# libs << " -r #{File.dirname(__FILE__) + '/../lib/console_lib/console_logger.rb'}"
+libs << " -r #{File.dirname(__FILE__) + '/../lib/s3backup.rb'}"
+puts "Loading s3backup gem"
+exec "#{irb} #{libs} --simple-prompt"
data/script/destroy
ADDED
@@ -0,0 +1,14 @@
+#!/usr/bin/env ruby
+APP_ROOT = File.expand_path(File.join(File.dirname(__FILE__), '..'))
+
+begin
+  require 'rubigen'
+rescue LoadError
+  require 'rubygems'
+  require 'rubigen'
+end
+require 'rubigen/scripts/destroy'
+
+ARGV.shift if ['--help', '-h'].include?(ARGV[0])
+RubiGen::Base.use_component_sources! [:rubygems, :newgem, :newgem_theme, :test_unit]
+RubiGen::Scripts::Destroy.new.run(ARGV)
data/script/generate
ADDED
@@ -0,0 +1,14 @@
+#!/usr/bin/env ruby
+APP_ROOT = File.expand_path(File.join(File.dirname(__FILE__), '..'))
+
+begin
+  require 'rubigen'
+rescue LoadError
+  require 'rubygems'
+  require 'rubigen'
+end
+require 'rubigen/scripts/generate'
+
+ARGV.shift if ['--help', '-h'].include?(ARGV[0])
+RubiGen::Base.use_component_sources! [:rubygems, :newgem, :newgem_theme, :test_unit]
+RubiGen::Scripts::Generate.new.run(ARGV)
data/spec/s3backup_cli_spec.rb
ADDED
@@ -0,0 +1,15 @@
+require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
+require 's3backup/cli'
+
+describe S3backup::CLI, "execute" do
+  before(:each) do
+    @stdout_io = StringIO.new
+    S3backup::CLI.execute(@stdout_io, [])
+    @stdout_io.rewind
+    @stdout = @stdout_io.read
+  end
+
+  it "should print default output" do
+    @stdout.should =~ /To update this executable/
+  end
+end
data/spec/spec.opts
ADDED
@@ -0,0 +1 @@
+--colour
data/spec/spec_helper.rb
ADDED
data/tasks/rspec.rake
ADDED
@@ -0,0 +1,21 @@
+begin
+  require 'spec'
+rescue LoadError
+  require 'rubygems' unless ENV['NO_RUBYGEMS']
+  require 'spec'
+end
+begin
+  require 'spec/rake/spectask'
+rescue LoadError
+  puts <<-EOS
+To use rspec for testing you must install rspec gem:
+    gem install rspec
+EOS
+  exit(0)
+end
+
+desc "Run the specs under spec/models"
+Spec::Rake::SpecTask.new do |t|
+  t.spec_opts = ['--options', "spec/spec.opts"]
+  t.spec_files = FileList['spec/**/*_spec.rb']
+end
metadata
ADDED
@@ -0,0 +1,100 @@
+--- !ruby/object:Gem::Specification
+name: s3backup
+version: !ruby/object:Gem::Version
+  version: 0.5.1
+platform: ruby
+authors:
+- Takeshi Morita
+autorequire:
+bindir: bin
+cert_chain: []
+
+date: 2009-10-22 00:00:00 +09:00
+default_executable:
+dependencies:
+- !ruby/object:Gem::Dependency
+  name: aws-s3
+  type: :runtime
+  version_requirement:
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: 0.6.2
+    version:
+- !ruby/object:Gem::Dependency
+  name: hoe
+  type: :development
+  version_requirement:
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: 2.3.3
+    version:
+description: S3Backup is a tool that backs up local directories to Amazon S3.
+email:
+- morita@ibrains.co.jp
+executables:
+- s3backup
+extensions: []
+
+extra_rdoc_files:
+- History.txt
+- Manifest.txt
+- PostInstall.txt
+files:
+- History.txt
+- Manifest.txt
+- PostInstall.txt
+- README.rdoc
+- Rakefile
+- backup.yml
+- bin/s3backup
+- lib/s3backup.rb
+- lib/s3backup/backup.rb
+- lib/s3backup/cli.rb
+- lib/s3backup/crypt.rb
+- lib/s3backup/restore.rb
+- lib/s3backup/s3log.rb
+- lib/s3backup/s3wrapper.rb
+- lib/s3backup/tree_info.rb
+- script/console
+- script/destroy
+- script/generate
+- spec/s3backup_cli_spec.rb
+- spec/s3backup_spec.rb
+- spec/spec.opts
+- spec/spec_helper.rb
+- tasks/rspec.rake
+has_rdoc: true
+homepage: http://rubyforge.org/projects/s3backup/
+licenses: []
+
+post_install_message: PostInstall.txt
+rdoc_options:
+- --main
+- README.rdoc
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: "0"
+  version:
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: "0"
+  version:
+requirements: []
+
+rubyforge_project: s3backup
+rubygems_version: 1.3.5
+signing_key:
+specification_version: 3
+summary: S3Backup is a tool that backs up local directories to Amazon S3.
+test_files: []
+