darkofabijan-astrails-safe 0.2.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. data/LICENSE +20 -0
  2. data/README.markdown +237 -0
  3. data/Rakefile +61 -0
  4. data/bin/astrails-safe +53 -0
  5. data/examples/example_helper.rb +19 -0
  6. data/lib/astrails/safe.rb +61 -0
  7. data/lib/astrails/safe/archive.rb +24 -0
  8. data/lib/astrails/safe/backup.rb +20 -0
  9. data/lib/astrails/safe/cloudfiles.rb +70 -0
  10. data/lib/astrails/safe/config/builder.rb +60 -0
  11. data/lib/astrails/safe/config/node.rb +76 -0
  12. data/lib/astrails/safe/gpg.rb +46 -0
  13. data/lib/astrails/safe/gzip.rb +25 -0
  14. data/lib/astrails/safe/local.rb +70 -0
  15. data/lib/astrails/safe/mysqldump.rb +32 -0
  16. data/lib/astrails/safe/pgdump.rb +36 -0
  17. data/lib/astrails/safe/pipe.rb +17 -0
  18. data/lib/astrails/safe/s3.rb +86 -0
  19. data/lib/astrails/safe/sftp.rb +88 -0
  20. data/lib/astrails/safe/sink.rb +35 -0
  21. data/lib/astrails/safe/source.rb +47 -0
  22. data/lib/astrails/safe/stream.rb +20 -0
  23. data/lib/astrails/safe/svndump.rb +13 -0
  24. data/lib/astrails/safe/tmp_file.rb +48 -0
  25. data/lib/extensions/mktmpdir.rb +45 -0
  26. data/spec/integration/archive_integration_spec.rb +88 -0
  27. data/spec/integration/cleanup_spec.rb +61 -0
  28. data/spec/spec.opts +5 -0
  29. data/spec/spec_helper.rb +16 -0
  30. data/spec/unit/archive_spec.rb +67 -0
  31. data/spec/unit/cloudfiles_spec.rb +170 -0
  32. data/spec/unit/config_spec.rb +213 -0
  33. data/spec/unit/gpg_spec.rb +148 -0
  34. data/spec/unit/gzip_spec.rb +64 -0
  35. data/spec/unit/local_spec.rb +110 -0
  36. data/spec/unit/mysqldump_spec.rb +83 -0
  37. data/spec/unit/pgdump_spec.rb +45 -0
  38. data/spec/unit/s3_spec.rb +160 -0
  39. data/spec/unit/svndump_spec.rb +39 -0
  40. data/templates/script.rb +165 -0
  41. metadata +179 -0
data/lib/astrails/safe/archive.rb
@@ -0,0 +1,24 @@
+module Astrails
+  module Safe
+    class Archive < Source
+
+      def command
+        "tar -cf - #{@config[:options]} #{tar_exclude_files} #{tar_files}"
+      end
+
+      def extension; '.tar'; end
+
+      protected
+
+      def tar_exclude_files
+        [*@config[:exclude]].compact.map{|x| "--exclude=#{x}"}.join(" ")
+      end
+
+      def tar_files
+        raise RuntimeError, "missing files for tar" unless @config[:files]
+        [*@config[:files]].map{|s| s.strip}.join(" ")
+      end
+
+    end
+  end
+end
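
As a quick illustration of the command string Archive#command builds, here is a standalone sketch (not the gem's API; the config values are hypothetical) that reproduces the same interpolation:

    # Hypothetical values for illustration; Archive reads them from @config.
    config  = { :options => "--ignore-failed-read", :exclude => ["/var/www/app/tmp"], :files => ["/var/www/app"] }
    exclude = [*config[:exclude]].compact.map { |x| "--exclude=#{x}" }.join(" ")
    files   = [*config[:files]].map { |s| s.strip }.join(" ")
    puts "tar -cf - #{config[:options]} #{exclude} #{files}"
    # => tar -cf - --ignore-failed-read --exclude=/var/www/app/tmp /var/www/app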
data/lib/astrails/safe/backup.rb
@@ -0,0 +1,20 @@
+module Astrails
+  module Safe
+    class Backup
+      attr_accessor :id, :kind, :filename, :extension, :command, :compressed, :timestamp, :path
+      def initialize(opts = {})
+        opts.each do |k, v|
+          self.send("#{k}=", v)
+        end
+      end
+
+      def run(config, *mods)
+        mods.each do |mod|
+          mod = mod.to_s
+          mod[0] = mod[0..0].upcase
+          Astrails::Safe.const_get(mod).new(config, self).process
+        end
+      end
+    end
+  end
+end
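
Backup#run is the dispatch point of the pipeline: each symbol is capitalized into a constant under Astrails::Safe and its #process is invoked in order. A minimal sketch of just the name mangling (the pipeline call itself is only a comment, since it needs the full gem loaded):

    # e.g. backup.run(config, :gpg, :gzip, :local) would resolve
    # Astrails::Safe::Gpg, ::Gzip, ::Local and call #process on each.
    mod = :gzip.to_s
    mod[0] = mod[0..0].upcase
    puts mod # => "Gzip"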
data/lib/astrails/safe/cloudfiles.rb
@@ -0,0 +1,70 @@
+module Astrails
+  module Safe
+    class Cloudfiles < Sink
+      MAX_CLOUDFILES_FILE_SIZE = 5368709120
+
+      protected
+
+      def active?
+        container && user && api_key
+      end
+
+      def path
+        @path ||= expand(config[:cloudfiles, :path] || config[:local, :path] || ":kind/:id")
+      end
+
+      def save
+        raise RuntimeError, "pipe-streaming not supported for Cloudfiles." unless @backup.path
+
+        # needed in cleanup even on dry run
+        cf = CloudFiles::Connection.new(user, api_key, true, service_net) unless $LOCAL
+        puts "Uploading #{container}:#{full_path} from #{@backup.path}" if $_VERBOSE || $DRY_RUN
+        unless $DRY_RUN || $LOCAL
+          if File.stat(@backup.path).size > MAX_CLOUDFILES_FILE_SIZE
+            STDERR.puts "ERROR: File size exceeds maximum allowed for upload to Cloud Files (#{MAX_CLOUDFILES_FILE_SIZE}): #{@backup.path}"
+            return
+          end
+          benchmark = Benchmark.realtime do
+            cf_container = cf.create_container(container)
+            o = cf_container.create_object(full_path,true)
+            o.write(File.open(@backup.path))
+          end
+          puts "...done" if $_VERBOSE
+          puts("Upload took " + sprintf("%.2f", benchmark) + " second(s).") if $_VERBOSE
+        end
+      end
+
+      def cleanup
+        return if $LOCAL
+
+        return unless keep = @config[:keep, :cloudfiles]
+
+        puts "listing files: #{container}:#{base}*" if $_VERBOSE
+        cf = CloudFiles::Connection.new(user, api_key, true, service_net) unless $LOCAL
+        cf_container = cf.container(container)
+        files = cf_container.objects(:prefix => base).sort
+
+        cleanup_with_limit(files, keep) do |f|
+          puts "removing Cloud File #{container}:#{f}" if $DRY_RUN || $_VERBOSE
+          cf_container.delete_object(f) unless $DRY_RUN || $LOCAL
+        end
+      end
+
+      def container
+        @config[:cloudfiles, :container]
+      end
+
+      def user
+        @config[:cloudfiles, :user]
+      end
+
+      def api_key
+        @config[:cloudfiles, :api_key]
+      end
+
+      def service_net
+        @config[:cloudfiles, :service_net] || false
+      end
+    end
+  end
+end
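
A hypothetical configuration sketch for this sink, assuming the gem's usual `safe do ... end` config entry point; the key names come from the accessors above, the values are made up:

    safe do
      cloudfiles do
        user      "rackspace-username"
        api_key   "rackspace-api-key"
        container "backups"            # :service_net defaults to false when omitted
      end
    end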
data/lib/astrails/safe/config/builder.rb
@@ -0,0 +1,60 @@
+module Astrails
+  module Safe
+    module Config
+      class Builder
+        COLLECTIONS = %w/database archive repo/
+        ITEMS = %w/s3 cloudfiles key secret bucket api_key container service_net path gpg password keep local mysqldump pgdump command options
+          user host port socket skip_tables tar files exclude filename svndump repo_path sftp page page_size/
+        NAMES = COLLECTIONS + ITEMS
+        def initialize(node)
+          @node = node
+        end
+
+        # supported args:
+        # args = [value]
+        # args = [id, data]
+        # args = [data]
+        # id/value - simple values, data - hash
+        def method_missing(sym, *args, &block)
+          return super unless NAMES.include?(sym.to_s)
+
+          # do we have id or value?
+          unless args.first.is_a?(Hash)
+            id_or_value = args.shift # nil for args == []
+          end
+
+          id_or_value = id_or_value.map {|v| v.to_s} if id_or_value.is_a?(Array)
+
+          # do we have data hash?
+          if data = args.shift
+            raise "#{sym}: hash expected: #{data.inspect}" unless data.is_a?(Hash)
+          end
+
+          #puts "#{sym}: args=#{args.inspect}, id_or_value=#{id_or_value}, data=#{data.inspect}, block=#{block.inspect}"
+
+          raise "#{sym}: unexpected: #{args.inspect}" unless args.empty?
+          raise "#{sym}: missing arguments" unless id_or_value || data || block
+
+          if COLLECTIONS.include?(sym.to_s) && id_or_value
+            data ||= {}
+          end
+
+          if !data && !block
+            # simple value assignment
+            @node[sym] = id_or_value
+
+          elsif id_or_value
+            # collection element with id => create collection node and a subnode in it
+            key = sym.to_s + "s"
+            collection = @node[key] || @node.set(key, {})
+            collection.set(id_or_value, data || {}, &block)
+
+          else
+            # simple subnode
+            @node.set(sym, data || {}, &block)
+          end
+        end
+      end
+    end
+  end
+end
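
To make the method_missing branches concrete, a commented sketch of how DSL calls land in the underlying Node (names come from COLLECTIONS/ITEMS above, values are illustrative):

    # path "/backup/:kind"        # simple value     -> node["path"] = "/backup/:kind"
    # gpg  :password => "secret"  # data hash        -> subnode "gpg" with "password"
    # database :blog do           # collection + id  -> node["databases"]["blog"]
    #   user "root"               #                     with its own subnode data
    # end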
data/lib/astrails/safe/config/node.rb
@@ -0,0 +1,76 @@
+require 'astrails/safe/config/builder'
+module Astrails
+  module Safe
+    module Config
+      class Node
+        attr_reader :parent
+        attr_reader :data
+        def initialize(parent = nil, data = {}, &block)
+          @parent, @data = parent, {}
+          data.each { |k, v| self[k] = v }
+          Builder.new(self).instance_eval(&block) if block
+        end
+
+        # looks for the path from this node DOWN. will not delegate to parent
+        def get(*path)
+          key = path.shift
+          value = @data[key.to_s]
+          return value if value && path.empty?
+
+          value && value.get(*path)
+        end
+
+        # recursive find
+        # starts at the node and continues to the parent
+        def find(*path)
+          get(*path) || @parent && @parent.find(*path)
+        end
+        alias :[] :find
+
+        MULTIVALUES = %w/skip_tables exclude files/
+        def set(key, value, &block)
+          if @data[key.to_s]
+            raise(ArgumentError, "duplicate value for '#{key}'") if value.is_a?(Hash) || !MULTIVALUES.include?(key.to_s)
+          end
+
+          if value.is_a?(Hash)
+            @data[key.to_s] = Node.new(self, value, &block)
+          else
+            raise(ArgumentError, "#{key}: no block supported for simple values") if block
+            if @data[key.to_s]
+              @data[key.to_s] = @data[key.to_s].to_a + value.to_a
+            else
+              @data[key.to_s] = value
+            end
+            value
+          end
+        end
+        alias :[]= :set
+
+        def each(&block)
+          @data.each(&block)
+        end
+        include Enumerable
+
+        def to_hash
+          @data.keys.inject({}) do |res, key|
+            value = @data[key]
+            res[key] = value.is_a?(Node) ? value.to_hash : value
+            res
+          end
+        end
+
+        def dump(indent = "")
+          @data.each do |key, value|
+            if value.is_a?(Node)
+              puts "#{indent}#{key}:"
+              value.dump(indent + " ")
+            else
+              puts "#{indent}#{key}: #{value.inspect}"
+            end
+          end
+        end
+      end
+    end
+  end
+end
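
A small behavioural sketch of the lookup rules, assuming node.rb (and the builder it requires) is loaded: get only searches downward, find falls back to the parent chain, and [] is an alias for find.

    root  = Astrails::Safe::Config::Node.new(nil, "keep" => { "local" => 4 })
    child = Astrails::Safe::Config::Node.new(root, "path" => "/backup")
    child.get(:keep, :local)   # => nil        (not defined below child)
    child.find(:keep, :local)  # => 4          (inherited from root)
    child[:path]               # => "/backup"  ([] is find)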
data/lib/astrails/safe/gpg.rb
@@ -0,0 +1,46 @@
+module Astrails
+  module Safe
+    class Gpg < Pipe
+
+      protected
+
+      def post_process
+        @backup.compressed = true
+      end
+
+      def pipe
+        command = @config[:gpg, :command] || 'gpg'
+        if key
+          "|#{command} #{@config[:gpg, :options]} -e -r #{key}"
+        elsif password
+          "|#{command} #{@config[:gpg, :options]} -c --passphrase-file #{gpg_password_file(password)}"
+        end
+      end
+
+      def extension
+        ".gpg"
+      end
+
+      def active?
+        raise RuntimeError, "can't use both gpg password and pubkey" if key && password
+
+        !!(password || key)
+      end
+
+      private
+
+      def password
+        @password ||= @config[:gpg, :password]
+      end
+
+      def key
+        @key ||= @config[:gpg, :key]
+      end
+
+      def gpg_password_file(pass)
+        return "TEMP_GENERATED_FILENAME" if $DRY_RUN
+        Astrails::Safe::TmpFile.create("gpg-pass") { |file| file.write(pass) }
+      end
+    end
+  end
+end
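
A hypothetical config sketch: Gpg#active? accepts either a public key or a password (setting both raises), and #pipe then emits the matching gpg fragment.

    safe do
      gpg :key => "backup@example.com"
      # pipe => "|gpg  -e -r backup@example.com"   (no :gpg/:options set here)
      # with :password => "..." instead, pipe uses -c --passphrase-file and a generated temp file
    end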
data/lib/astrails/safe/gzip.rb
@@ -0,0 +1,25 @@
+module Astrails
+  module Safe
+    class Gzip < Pipe
+
+      protected
+
+      def post_process
+        @backup.compressed = true
+      end
+
+      def pipe
+        "|gzip"
+      end
+
+      def extension
+        ".gz"
+      end
+
+      def active?
+        !@backup.compressed
+      end
+
+    end
+  end
+end
data/lib/astrails/safe/local.rb
@@ -0,0 +1,70 @@
+module Astrails
+  module Safe
+    class Local < Sink
+
+      protected
+
+      def active?
+        # S3 can't upload from pipe. it needs to know file size, so we must pass through :local
+        # will change once we add SSH/FTP sink
+        true
+      end
+
+      def path
+        @path ||= File.expand_path(expand(@config[:local, :path] || raise(RuntimeError, "missing :local/:path")))
+      end
+
+      def save
+        puts "command: #{@backup.command}" if $_VERBOSE
+
+        # FIXME: probably need to change this to smth like @backup.finalize!
+        @backup.path = full_path # need to do it outside DRY_RUN so that it will be avialable for S3 DRY_RUN
+
+        unless $DRY_RUN
+          FileUtils.mkdir_p(path) unless File.directory?(path)
+          benchmark = Benchmark.realtime do
+            if paging_enabled?
+              system "cd #{self.path}; #{@backup.command}>/dev/null"
+            else
+              system "#{@backup.command}>#{@backup.path}"
+            end
+          end
+          puts("command took " + sprintf("%.2f", benchmark) + " second(s).") if $_VERBOSE
+          if paging_enabled?
+            target_dir = File.dirname(@backup.path)
+            Dir.chdir(target_dir)
+            files = Dir.glob("#{File.basename(@backup.path)}*")
+            files.collect! { |file| File.join(target_dir, file) }
+            @backup.path = files
+            puts("Paged files: #{@backup.path.inspect}") if $_VERBOSE
+          end
+        end
+
+      end
+
+      def cleanup
+        return unless keep = @config[:keep, :local]
+
+        puts "listing files #{base}" if $_VERBOSE
+
+        # TODO: cleanup ALL zero-length files
+
+        files = Dir["#{base}*"] .
+          select{|f| File.file?(f) && File.size(f) > 0} .
+          sort
+
+        cleanup_with_limit(files, keep) do |f|
+          puts "removing local file #{f}" if $DRY_RUN || $_VERBOSE
+          File.unlink(f) unless $DRY_RUN
+        end
+      end
+
+      private
+
+      def paging_enabled?
+        @backup.command.include?("split")
+      end
+
+    end
+  end
+end
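
A hypothetical config sketch for the :local sink; :path is expanded by the sink (with :kind/:id placeholders), and :keep/:local is the limit cleanup passes to cleanup_with_limit when trimming old, non-empty files.

    safe do
      local :path => "/backup/:kind/:id"
      keep  :local => 4
    end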
data/lib/astrails/safe/mysqldump.rb
@@ -0,0 +1,32 @@
+module Astrails
+  module Safe
+    class Mysqldump < Source
+
+      def command
+        "mysqldump --defaults-extra-file=#{mysql_password_file} #{@config[:options]} #{mysql_skip_tables} #{@id}"
+      end
+
+      def extension; '.sql'; end
+
+      protected
+
+      def mysql_password_file
+        Astrails::Safe::TmpFile.create("mysqldump") do |file|
+          file.puts "[mysqldump]"
+          %w/user password socket host port/.each do |k|
+            v = @config[k]
+            # values are quoted if needed
+            file.puts "#{k} = #{v.inspect}" if v
+          end
+        end
+      end
+
+      def mysql_skip_tables
+        if skip_tables = @config[:skip_tables]
+          [*skip_tables].map{ |t| "--ignore-table=#{@id}.#{t}" }.join(" ")
+        end
+      end
+
+    end
+  end
+end
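
A standalone sketch (not the gem's API) of the defaults-extra-file content mysql_password_file writes for a hypothetical config, showing why values are emitted with inspect-style quoting:

    config = { "user" => "root", "password" => "s3cr3t", "socket" => "/var/run/mysqld/mysqld.sock" }
    puts "[mysqldump]"
    %w/user password socket host port/.each do |k|
      v = config[k]
      puts "#{k} = #{v.inspect}" if v   # inspect wraps each value in double quotes
    end
    # [mysqldump]
    # user = "root"
    # password = "s3cr3t"
    # socket = "/var/run/mysqld/mysqld.sock"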
data/lib/astrails/safe/pgdump.rb
@@ -0,0 +1,36 @@
+module Astrails
+  module Safe
+    class Pgdump < Source
+
+      def command
+        if @config["password"]
+          ENV['PGPASSWORD'] = @config["password"]
+        else
+          ENV['PGPASSWORD'] = nil
+        end
+        "pg_dump #{postgres_options} #{postgres_username} #{postgres_host} #{postgres_port} #{@id}"
+      end
+
+      def extension; '.sql'; end
+
+      protected
+
+      def postgres_options
+        @config[:options]
+      end
+
+      def postgres_host
+        @config["host"] && "--host='#{@config["host"]}'"
+      end
+
+      def postgres_port
+        @config["port"] && "--port='#{@config["port"]}'"
+      end
+
+      def postgres_username
+        @config["user"] && "--username='#{@config["user"]}'"
+      end
+
+    end
+  end
+end
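
And a standalone sketch of the pg_dump command line Pgdump#command interpolates for a hypothetical "blog" database (quoting mirrors the helpers above; a configured password is exported via PGPASSWORD rather than placed on the command line):

    config = { "user" => "postgres", "host" => "127.0.0.1", "port" => "5432", :options => "-i -x -O" }
    cmd = [
      "pg_dump",
      config[:options],
      config["user"] && "--username='#{config["user"]}'",
      config["host"] && "--host='#{config["host"]}'",
      config["port"] && "--port='#{config["port"]}'",
      "blog"
    ].join(" ")
    puts cmd
    # => pg_dump -i -x -O --username='postgres' --host='127.0.0.1' --port='5432' blog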