backup-agent 1.0.9 → 2.0.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 097cb7442ca1df2a35da304073f1f8eb27a1fc4b
- data.tar.gz: b4f5c9696a060679a9791fc7ae0925b4b998833a
+ metadata.gz: 4f391904e3b281dd04dc9b8f5983c56a9da2f387
+ data.tar.gz: 774815f802892ee300d8e5dc87e34f3f7fe8bfee
  SHA512:
- metadata.gz: 9e2554db02299bc6cc2bf813e8cf1e4b0bf49901860d01f52667e6b173787fd8ef3e376043688977add913e2796ec12e1ad217f76c33119084864190d533ffad
- data.tar.gz: bbf52beb7a833e7fe33d47778f6fc84d6b3cf70a6fad797e6727364176e627ab9a5ef8fc1ff07563d081db72fa6bc9c6d673c5e0a7ef420d4920812377094585
+ metadata.gz: d8dbb5c1a4593f3dfd7b928a655a7c13f8c4b6ad9a60a3b40838e1af04583b034305d90ffbc2434ae6eaecb793c2e252998d3ffd311d7975d8ee2a994887a446
+ data.tar.gz: aeef30268dae8a1955d4045e8a195a20efa31c4fb70cfeacc68770d06f0c1feeb3ac9bd4bbc89b6b67787a1ea34de0593dca44181455039be4f463e782488369
data/LICENSE CHANGED
@@ -1,4 +1,4 @@
- Copyright (c) 2015 Yaroslav Konoplov
+ Copyright (c) 2015, 2016, 2017 Yaroslav Konoplov

  MIT License

data/README.md CHANGED
@@ -1,20 +1,4 @@
- ```ruby
- require 'backup-agent'
-
- storage_config = Backup::S3Config.new do
- set :access_key_id, 'xxx'
- set :secret_access_key, 'yyy'
- set :region, 'eu-central-1'
- end
-
- storage = Backup::S3Storage.new(storage_config, bucket: 'my-backups')
-
- Backup.perform storage do
- set :mysql_host, 'xxx.yyy.xxx.yyy'
- end
- ```
-
- ## Gemfile
- ```ruby
- gem 'backup-agent'
- ```
+ ### TODO:
+ * Update description in gemspec
+ * Add ability to specify name with slashes in directory backup task
+ * Release v2.1
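The v1 usage example above was removed in favour of a short TODO list, so 2.0.0 ships with no replacement documentation in the README. A minimal sketch of the new configuration DSL, adapted from the `data/test.rb` file added in this release (credentials, bucket, and paths below are placeholders), would look roughly like this:

```ruby
require 'backup-agent'   # add `gem 'backup-agent'` to your Gemfile first

credentials do
  mysql localhost: { user: 'root', password: 'secret' }                       # placeholder
  amazon_s3 default: { access_key_id: 'AKIA...', secret_access_key: '...' }   # placeholders
end

storages do
  amazon_s3 default: { region: 'eu-central-1', bucket: 'my-backups' }         # placeholder bucket
end

backup to: { amazon_s3: :default } do
  mysql host: '127.0.0.1', credentials: :localhost, databases: :all
  directory '/var/www/app/current/public', compressor: :xz
end

delete_backups_older_than 30.days
```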
data/backup-agent.gemspec CHANGED
@@ -1,21 +1,23 @@
- # encoding: utf-8
+ # encoding: UTF-8
+ # frozen_string_literal: true
+
+ require File.expand_path("../lib/backup-agent/version", __FILE__)

  Gem::Specification.new do |s|
- s.name = 'backup-agent'
- s.version = '1.0.9'
- s.authors = ['Yaroslav Konoplov']
- s.email = ['yaroslav@inbox.com']
- s.summary = 'Easy AWS S3 backup'
- s.description = 'Easy AWS S3 backup'
- s.homepage = 'http://github.com/yivo/backup-agent'
- s.license = 'MIT'
+ s.name = "backup-agent"
+ s.version = Backup::VERSION
+ s.authors = ["Yaroslav Konoplov"]
+ s.email = ["eahome00@gmail.com"]
+ s.summary = "Easy AWS S3 backup"
+ s.description = "Easy AWS S3 backup"
+ s.homepage = "http://github.com/yivo/backup-agent"
+ s.license = "MIT"

- s.executables = `git ls-files -z -- bin/*`.split("\x0").map{ |f| File.basename(f) }
  s.files = `git ls-files -z`.split("\x0")
  s.test_files = `git ls-files -z -- {test,spec,features}/*`.split("\x0")
- s.require_paths = ['lib']
+ s.require_paths = ["lib"]

- s.add_dependency 'aws-sdk'
- s.add_dependency 'confo-config', '>= 1.0.0'
- s.add_dependency 'activesupport', '>= 3.2.0'
+ s.add_dependency "aws-sdk", "~> 2"
+ s.add_dependency "activesupport", ">= 3.0", "< 6.0"
+ s.add_dependency "method-not-implemented", "~> 1.0"
  end
data/lib/backup-agent.rb CHANGED
@@ -1,19 +1,34 @@
- puts "Ruby version #{RUBY_VERSION}-p#{RUBY_PATCHLEVEL}"
+ # encoding: UTF-8
+ # frozen_string_literal: true

- %w( rubygems aws-sdk fileutils confo-config shellwords ).each { |el| require(el) }
+ require "fileutils"
+ require "tempfile"
+ require "shellwords"
+ require "open3"
+ require "singleton"
+ require "aws-sdk"
+ require "method-not-implemented"
+ require "active_support/core_ext/object/blank"
+ require "active_support/core_ext/string/filters"
+ require "active_support/core_ext/string/multibyte"
+ require "active_support/core_ext/numeric/time"

- %w( abstract_storage abstract_storage_config abstract_storage_object
- s3_storage s3_config s3_object
- features task performer ).each { |el| require_relative("backup-agent/#{el}") }
+ ["ruby", "tar", "gzip", "xz", "mysql", "mysqldump"].each do |x|
+ puts Open3.capture3(x, "--version")[0...2].map(&:squish).reject(&:blank?).join(' ')
+ end

- module Backup
- class << self
- def perform(storage, &block)
- Performer.new.perform_backup(storage, Task.new(&block))
- end
+ $LOAD_PATH << __dir__ unless $LOAD_PATH.include?(__dir__)

- def features
- @features ||= Features.new
- end
- end
- end
+ require "backup-agent/dsl"
+ require "backup-agent/credentials"
+ require "backup-agent/performer"
+
+ require "backup-agent/storages"
+ require "backup-agent/storages/base"
+ require "backup-agent/storages/local"
+ require "backup-agent/storages/amazon-s3"
+
+ require "backup-agent/tasks/directory"
+ require "backup-agent/tasks/mysql"
+
+ include Backup::DSL
data/lib/backup-agent/credentials.rb ADDED
@@ -0,0 +1,36 @@
+ # encoding: UTF-8
+ # frozen_string_literal: true
+
+ module Backup
+ class Credentials
+ include Singleton
+
+ def initialize
+ @groups = {}
+ end
+
+ # Usage: credentials(type: :name)
+ def [](pair)
+ @groups.fetch(pair.keys[0]).fetch(pair.values[0])
+ end
+
+ # define Class => [:type, :name, arguments...]
+ def define(definitions)
+ definitions.map do |klass, definition|
+ (@groups[definition[0]] ||= {})[definition[1]] = klass.new(*definition.drop(2))
+ end
+ end
+
+ def mysql(definitions)
+ definitions.map do |name, args|
+ define Backup::Tasks::MySQL::Credentials => [:mysql, name, *[args].flatten(1)]
+ end.flatten(1)
+ end
+
+ def amazon_s3(definitions)
+ definitions.map do |name, args|
+ define Backup::Storages::AmazonS3::Credentials => [:amazon_s3, name, *[args].flatten(1)]
+ end.flatten(1)
+ end
+ end
+ end
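`Backup::Credentials` is a singleton registry keyed first by type and then by name; `mysql` and `amazon_s3` are thin wrappers around `define` that build the matching credential objects. A small sketch of how it is exercised through the DSL that follows (all values here are placeholders):

```ruby
include Backup::DSL   # done globally by lib/backup-agent.rb

# Register credential objects under [:mysql][:localhost] and [:amazon_s3][:default].
credentials do
  mysql localhost: { user: 'root', password: '' }
  amazon_s3 default: { access_key_id: 'AKIA...', secret_access_key: '...' }
end

# Look one back up with a single-pair hash, as the performer does internally.
credentials(mysql: :localhost)              # => Backup::Tasks::MySQL::Credentials
credentials(mysql: :localhost).to_options   # => ["--user", "root", ""]
```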
data/lib/backup-agent/dsl.rb ADDED
@@ -0,0 +1,104 @@
+ # encoding: UTF-8
+ # frozen_string_literal: true
+
+ module Backup
+ module DSL
+ def echo(*args)
+ puts(*args)
+ end
+
+ def with(environment)
+ @current_command_environment = environment&.each_with_object({}) { |(k, v), m| m[k.to_s] = v.to_s }
+ yield
+ ensure
+ remove_instance_variable(:@current_command_environment)
+ end
+
+ def stdin(data, binmode: false)
+ @current_command_stdin_data = data
+ @current_command_stdin_data_binmode = binmode
+ yield
+ ensure
+ remove_instance_variable(:@current_command_stdin_data)
+ remove_instance_variable(:@current_command_stdin_data_binmode)
+ end
+
+ def command(*args)
+ returned, msec = measure args.map(&:to_s).join(" ") do
+
+ if instance_variable_defined?(:@current_command_environment) && @current_command_environment
+ args.unshift(@current_command_environment)
+ end
+
+ stdout, stderr, exit_status = \
+ if instance_variable_defined?(:@current_command_stdin_data)
+ Open3.capture3 *args, \
+ stdin_data: @current_command_stdin_data,
+ binmode: @current_command_stdin_data_binmode
+ else
+ Open3.capture3(*args)
+ end
+
+ fail stderr unless exit_status.success?
+ # echo stdout
+ stdout
+ end
+ returned
+ end
+
+ def measure(action)
+ echo "\n", action
+ started = Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_millisecond)
+ returned = yield
+ finished = Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_millisecond)
+ echo "(#{ (finished - started).round(1) }ms)", "\n"
+ returned
+ end
+
+ def construct_filename(basename, extension_with_dot = nil)
+ [basename.gsub(/[^[[:alnum:]]]/i, "-")
+ .gsub(/[-–—]+/, "-")
+ .mb_chars.downcase.to_s,
+ "--#{Time.now.getutc.strftime("%Y-%m-%d--%H-%M-%S--UTC")}",
+ extension_with_dot.to_s.mb_chars.downcase.to_s].join("")
+ end
+
+ def storages(pair = nil, &block)
+ if pair
+ Backup::Storages.instance[pair]
+ elsif block
+ Backup::Storages.instance.instance_exec(&block)
+ else
+ Backup::Storages.instance
+ end
+ end
+
+ def credentials(pair = nil, &block)
+ if pair
+ Backup::Credentials.instance[pair]
+ elsif block
+ Backup::Credentials.instance.instance_exec(&block)
+ else
+ Backup::Credentials.instance
+ end
+ end
+
+ def backup(to:, &block)
+ storages = [to].flatten.map { |h| h.map { |k, v| self.storages(k => v) } }.flatten
+ Backup::Performer.new(storages).tap { |performer| performer.instance_eval(&block) }
+ end
+
+ def delete_backups_older_than(x)
+ cutoff_timestamp = Time.now.utc.to_i - x
+ storages.each do |storage|
+ storage.each do |object|
+ if object.last_modified.to_i < cutoff_timestamp
+ puts "Delete #{object.to_s} from #{storage.to_s}"
+ storage.delete(object.id)
+ end
+ end
+ end
+ end
+ end
+ end
+
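`command` is the workhorse here: it shells out through `Open3.capture3`, prepends an environment hash set by `with`, feeds any data set by `stdin`, raises on a non-zero exit status, and returns the captured stdout, timed and echoed via `measure`. An illustrative composition (the commands themselves are arbitrary examples):

```ruby
include Backup::DSL

# Pipe data through a child process; `command` returns its stdout.
upcased = stdin("hello backup-agent\n") do
  command("tr", "a-z", "A-Z")
end
# => "HELLO BACKUP-AGENT\n"

# Run a command with extra environment variables, as the tasks do for tar/xz.
with(XZ_OPT: "-9") do
  command("xz", "--version")
end
```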
data/lib/backup-agent/performer.rb CHANGED
@@ -1,119 +1,29 @@
+ # encoding: UTF-8
+ # frozen_string_literal: true
+
  module Backup
  class Performer
- attr_reader :storage, :config
-
- def perform_backup(storage, config)
- @storage = storage
- @config = config
- @started_at = Time.now.utc
- @timestamp = @started_at.strftime('%s - %A %d %B %Y %H:%M')
- storage.open
- make_tmp_dir
- backup_mysql if Backup.features.mysql_installed?
- backup_mongodb if Backup.features.mongodb_installed?
- backup_directories
- backup_files
- cleanup_old_backups
- ensure
- remove_tmp_dir
- storage.close
- end
-
- protected
-
- def backup_directories
- config.get(:directories).each do |name, dir|
- dir_filename = "#{name}.tar.xz"
- dir_fileparam = Shellwords.escape(dir_filename)
- cmd = "cd #{dir} && /usr/bin/env XZ_OPT=-9 tar -cJvf #{tmp_path}/#{dir_fileparam} ."
- puts "Exec #{cmd}"
- system(cmd)
- storage.upload("#{@timestamp}/#{dir_filename}", "#{tmp_path}/#{dir_filename}")
- end
- end
-
- def backup_files
- config.get(:files).each do |name, files|
- begin
- files_tmp_path = File.join(tmp_path, "#{name}-tmp")
- file_bunch_name = "#{name}.tar.xz"
- file_bunch_param = Shellwords.escape(file_bunch_name)
-
- FileUtils.mkdir_p(files_tmp_path)
- FileUtils.cp(files.select { |el| File.exists?(el) }, files_tmp_path)
-
- cmd = "cd #{files_tmp_path} && /usr/bin/env XZ_OPT=-3 tar -cJvf #{tmp_path}/#{file_bunch_param} ."
- system(cmd)
-
- storage.upload("#{@timestamp}/#{file_bunch_name}", "#{tmp_path}/#{file_bunch_name}")
- ensure
- FileUtils.remove_dir(files_tmp_path)
- end
- end
- end
-
- def backup_mysql
- config.get(:mysql_databases).each do |db|
- dump_path = "#{tmp_path}/#{ shell_escape(db) }.sql"
- dump_cmd = "mysqldump #{config.get(:mysql_connect)} #{config.get(:mysqldump_options).join(' ')} --databases #{db}"
-
- exec with_env("#{dump_cmd} > #{dump_path}")
- exec with_env("xz --compress -9 --keep --threads=0 --verbose #{dump_path}")
-
- storage.upload("#{@timestamp}/#{ shell_escape(db) }.sql.xz", "#{dump_path}.xz")
- end
+ def initialize(storages)
+ @storages = storages
  end

- def backup_mongodb
- mongo_dump_dir = File.join(tmp_path, 'mongo')
- FileUtils.mkdir_p(mongo_dump_dir)
-
- config.get(:mongo_databases).each do |db|
- db_filename = "Mongo Database #{db}.tar.xz"
- db_fileparam = Shellwords.escape(db_filename)
- dump = with_env "mongodump #{config.get(:mongo_connect)} -d #{db} -o #{mongo_dump_dir}"
- cd = "cd #{mongo_dump_dir}/#{db}"
- tar = with_env "XZ_OPT=-9 tar -cJvf #{tmp_path}/#{db_fileparam} ."
-
- puts "Exec #{dump} && #{cd} && #{tar}"
- system "#{dump} && #{cd} && #{tar}"
-
- storage.upload("#{@timestamp}/#{db_filename}", "#{tmp_path}/#{db_filename}")
+ # task Task => [:foo, :bar, :baz]
+ def task(arg)
+ arg.each do |klass, args|
+ @storages.each { |storage| klass.new(*args).perform(storage) }
  end
- ensure
- FileUtils.remove_dir(mongo_dump_dir)
+ nil
  end

- def cleanup_old_backups
- cutoff_date = Time.now.utc.to_i - (config.get(:days_to_keep_backups) * 86400)
- storage.each do |obj|
- obj.delete if obj.last_modified.to_i < cutoff_date
+ def mysql(options)
+ if Symbol === options[:credentials]
+ options[:credentials] = credentials(mysql: options[:credentials])
  end
+ task Backup::Tasks::MySQL => [options]
  end

- def make_tmp_dir
- FileUtils.mkdir_p(tmp_path)
- end
-
- def remove_tmp_dir
- FileUtils.remove_dir(tmp_path)
- end
-
- def tmp_path
- "/tmp/backup-agent-#{@started_at.strftime('%d-%m-%Y-%H:%M')}"
- end
-
- def with_env(cmd)
- "/usr/bin/env #{cmd}"
- end
-
- def shell_escape(x)
- Shellwords.escape(x)
- end
-
- def exec(cmd)
- puts "Exec #{cmd}"
- system cmd
+ def directory(path, options = {})
+ task Backup::Tasks::Directory => [path, options]
  end
  end
  end
data/lib/backup-agent/storages.rb ADDED
@@ -0,0 +1,42 @@
+ # encoding: UTF-8
+ # frozen_string_literal: true
+
+ module Backup
+ class Storages
+ include Singleton
+ include Enumerable
+
+ def initialize
+ @groups = {}
+ end
+
+ def [](pair)
+ @groups.fetch(pair.keys[0]).fetch(pair.values[0])
+ end
+
+ def each
+ @groups.each do |type, storages|
+ storages.each { |name, storage| yield storage, type, name }
+ end
+ end
+
+ # register AmazonS3 => [:amazon_s3, :default, storage constructor arguments...]
+ def register(arg)
+ arg.map do |klass, rest|
+ (@groups[rest[0]] ||= {})[rest[1]] = klass.new(*rest.drop(2))
+ end
+ end
+
+ def local(definitions)
+ definitions.map do |name, args|
+ register Backup::Storages::Local => [:local, name, *[args].flatten(1)]
+ end.flatten(1)
+ end
+
+ def amazon_s3(definitions)
+ definitions.map do |name, options|
+ register Backup::Storages::AmazonS3 => [:amazon_s3, name, options.merge(credentials: credentials[amazon_s3: name])]
+ end.flatten(1)
+ end
+ end
+ end
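`Backup::Storages` mirrors the credentials registry: `local` and `amazon_s3` wrap `register`, and the S3 helper resolves its credentials from `Backup::Credentials` under the same name. A sketch of registering and looking up storages (directory, bucket, and keys are placeholders):

```ruby
include Backup::DSL

credentials do
  amazon_s3 default: { access_key_id: 'AKIA...', secret_access_key: '...' }
end

storages do
  local default: { directory: '/var/backups' }
  amazon_s3 default: { region: 'eu-central-1', bucket: 'my-backups' }
end

storages(local: :default)   # => a Backup::Storages::Local instance
storages.each { |storage, type, name| puts "#{type}/#{name} -> #{storage}" }
```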
data/lib/backup-agent/storages/amazon-s3.rb ADDED
@@ -0,0 +1,51 @@
+ # encoding: UTF-8
+ # frozen_string_literal: true
+
+ #
+ # Storage based on Amazon S3
+ #
+ class Backup::Storages
+ class AmazonS3 < Backup::Storages::Base
+ def initialize(region:, bucket:, credentials:)
+ @aws_s3_ruby_credentials = Aws::Credentials.new(credentials.access_key_id, credentials.secret_access_key)
+ @aws_s3_ruby_resource = Aws::S3::Resource.new(region: region, credentials: @aws_s3_ruby_credentials)
+ @aws_s3_ruby_bucket = @aws_s3_ruby_resource.bucket(bucket)
+ end
+
+ def store(id, file_to_upload)
+ aws_s3_ruby_object = @aws_s3_ruby_bucket.object(id)
+ aws_s3_ruby_object.upload_file(file_to_upload)
+ Backup::Storages::AmazonS3::Object.new(self, @aws_s3_ruby_bucket, aws_s3_ruby_object.key)
+ end
+
+ def delete(id)
+ @aws_s3_ruby_bucket.object(id).delete
+ end
+
+ def each
+ @aws_s3_ruby_bucket.objects.each do |aws_s3_ruby_object|
+ yield Backup::Storages::AmazonS3::Object.new(self, @aws_s3_ruby_bucket, aws_s3_ruby_object.key)
+ end
+ end
+
+ class Credentials
+ attr_reader :access_key_id, :secret_access_key
+
+ def initialize(access_key_id:, secret_access_key:)
+ @access_key_id = access_key_id
+ @secret_access_key = secret_access_key
+ end
+ end
+
+ class Object < Backup::Storages::Base::Object
+ def initialize(storage, aws_s3_ruby_bucket, id)
+ super(storage, id)
+ @aws_s3_ruby_bucket = aws_s3_ruby_bucket
+ end
+
+ def last_modified
+ @aws_s3_ruby_bucket.object(id).last_modified.utc
+ end
+ end
+ end
+ end
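The storages can also be used directly, outside the DSL registries; every storage exposes `store`, `delete`, and `each`, handing back lightweight `Object` wrappers. A hedged sketch against this class (region, bucket, and keys are placeholders, and the calls perform real S3 requests):

```ruby
creds = Backup::Storages::AmazonS3::Credentials.new(
  access_key_id: 'AKIA...', secret_access_key: '...')

storage = Backup::Storages::AmazonS3.new(
  region: 'eu-central-1', bucket: 'my-backups', credentials: creds)

object = storage.store('db--2017-04-23.sql.xz', '/tmp/dump.sql.xz')   # uploads the file
storage.each { |obj| puts "#{obj.id} (#{obj.last_modified})" }        # walks the bucket
storage.delete(object.id)                                             # removes it again
```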
data/lib/backup-agent/storages/base.rb ADDED
@@ -0,0 +1,38 @@
+ # encoding: UTF-8
+ # frozen_string_literal: true
+
+ #
+ # Base stuff for storages
+ #
+ class Backup::Storages
+ class Base
+ def store(id, path)
+ method_not_implemented
+ end
+
+ def delete(id)
+ method_not_implemented
+ end
+
+ def each
+ method_not_implemented
+ end
+
+ class Object
+ attr_reader :storage, :id
+
+ def initialize(storage, id)
+ @storage = storage
+ @id = id
+ end
+
+ def last_modified
+ method_not_implemented
+ end
+
+ def to_s
+ id
+ end
+ end
+ end
+ end
data/lib/backup-agent/storages/local.rb ADDED
@@ -0,0 +1,37 @@
+ # encoding: UTF-8
+ # frozen_string_literal: true
+
+ #
+ # Storage based on some directory in local filesystem
+ #
+ class Backup::Storages
+ class Local < Backup::Storages::Base
+ attr_reader :directory
+
+ def initialize(directory:)
+ @directory = directory.gsub(/\/*\z/, "") # Strip any trailing slashes
+ end
+
+ def store(relative_path, file_to_write)
+ FileUtils.mkdir_p(directory)
+ FileUtils.cp_r(file_to_write, File.join(directory, relative_path))
+ Backup::Storages::Local::Object.new(self, relative_path)
+ end
+
+ def delete(relative_path)
+ FileUtils.rm_f(File.join(directory, relative_path))
+ end
+
+ def each
+ Dir.glob File.join(directory, "**", "*") do |path|
+ yield Backup::Storages::Local::Object.new(self, path[directory.size+1..-1])
+ end
+ end
+
+ class Object < Backup::Storages::Base::Object
+ def last_modified
+ File.mtime(File.join(storage.directory, id)).utc
+ end
+ end
+ end
+ end
data/lib/backup-agent/tasks/directory.rb ADDED
@@ -0,0 +1,61 @@
+ # encoding: UTF-8
+ # frozen_string_literal: true
+
+ module Backup::Tasks
+ class Directory
+ def initialize(path, options = {})
+ @path = path
+ # @name = options.fetch(:name)
+ @options = options
+
+ if options[:compressor]
+ @compressor = Symbol === options[:compressor] ? { type: options[:compressor] } : options[:compressor]
+ end
+ end
+
+ def perform(storage)
+ return if !File.readable?(@path) || !File.directory?(@path)
+
+ @options.fetch(:name, @path).tap do |x|
+ @filename = add_extension(construct_filename(File.basename(x, ".*")) + File.extname(x))
+ end
+
+ Tempfile.open do |tempfile|
+ with compression_environment do
+ command "tar", tar_flags, tempfile.path, "-C", @path, "."
+ end
+ storage.store(@filename, tempfile.path)
+ end
+ end
+
+ def compression_environment
+ case @compressor&.fetch(:type)
+ when :xz then { XZ_OPT: "-#{@compressor.fetch(:level, 3)}" }
+ when :gzip then { GZIP: "-#{@compressor.fetch(:level, 3)}" }
+ end
+ end
+
+ def tar_flags
+ flags = ["c"]
+
+ flags << "h" if @options.fetch(:symlinks, :follow) == :follow
+
+ case @compressor&.fetch(:type)
+ when :xz then flags << "J"
+ when :gzip then flags << "z"
+ end
+
+ flags << "v"
+ flags << "f"
+ flags.join("")
+ end
+
+ def add_extension(name)
+ case @compressor&.fetch(:type)
+ when :xz then name + ".tar.xz"
+ when :gzip then name + ".tar.gz"
+ else name + ".tar"
+ end
+ end
+ end
+ end
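Within a `backup` block, `directory` derives the tar flags, compression environment, and file extension from its options. Roughly, assuming a registered `local: :default` storage and illustrative paths:

```ruby
backup to: { local: :default } do
  # No compressor: runs `tar chvf <tempfile> -C <path> .` and stores a *.tar file.
  directory '/srv/app/shared/uploads'

  # xz at level 9: XZ_OPT=-9, `tar chJvf ...`, stored as *.tar.xz.
  directory '/srv/app/shared/uploads', compressor: { type: :xz, level: 9 }

  # gzip at the default level 3; any :symlinks value other than :follow drops tar's `h` flag.
  directory '/srv/app/shared/uploads', compressor: :gzip, symlinks: :skip
end
```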
data/lib/backup-agent/tasks/mysql.rb ADDED
@@ -0,0 +1,84 @@
+ # encoding: UTF-8
+ # frozen_string_literal: true
+
+ module Backup::Tasks
+ class MySQL
+ attr_reader :host, :credentials, :options
+
+ def initialize(host:, credentials:, databases: :all, **options)
+ @host = host
+ @credentials = credentials
+ @databases = databases
+ @options = options
+ end
+
+ def perform(storage)
+ databases.map do |db|
+ Tempfile.open do |tempfile|
+ command("mysqldump", *credentials.to_options, *host_options, *dump_options, "--databases", db).tap do |dump_sql|
+ stdin dump_sql do
+ command("xz", "--compress", "-9", "--format=xz", "--keep", "--threads=0", "--verbose", "--check=sha256").tap do |dump_xz|
+ tempfile.write(dump_xz)
+ storage.store(construct_filename(db, ".sql.xz"), tempfile.path)
+ end
+ end
+ end
+ end
+ end
+ end
+
+ def databases
+ if @databases == :all
+ command("mysql", *credentials.to_options, *host_options, "-e", "SHOW DATABASES;")
+ .split("\n")
+ .reject { |el| el =~ /database|information_schema|mysql|performance_schema|test|phpmyadmin|pma|sys/i }
+ else
+ [@databases].flatten.compact
+ end
+ end
+
+ def dump_options
+ @options.fetch(:dump_options) do
+ %W( --add-drop-table
+ --add-locks
+ --allow-keywords
+ --comments
+ --complete-insert
+ --create-options
+ --disable-keys
+ --extended-insert
+ --lock-tables
+ --quick
+ --quote-names
+ --routines
+ --set-charset
+ --dump-date
+ --tz-utc
+ --verbose )
+ end
+ end
+
+ def host_options
+ ["--host=#{@host}"]
+ end
+
+ class Credentials
+ def initialize(user:, password:)
+ @user = user
+ @password = password
+ end
+
+ def stringify
+ "--user #{@user} #{stringify_password}"
+ end
+
+ def stringify_password
+ @password.nil? || @password.empty? ? "" : "--password=#{@password}"
+ end
+
+ def to_options
+ ["--user", @user, stringify_password]
+ end
+ end
+ end
+ end
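The MySQL task runs one `mysqldump` per database, pipes the SQL through `xz` using the DSL's `stdin`/`command` helpers, and stores the result under a timestamped `<db>--<timestamp>.sql.xz` name; a `:credentials` Symbol is resolved by the performer through the credentials registry, and `:dump_options` overrides the default flag list. A brief sketch (host, database names, and the `local: :default` storage are assumptions):

```ruby
backup to: { local: :default } do
  mysql host: '127.0.0.1',
        credentials: :localhost,   # looked up via credentials(mysql: :localhost)
        databases: %w(app_production app_analytics),
        dump_options: %w(--single-transaction --quick --routines --tz-utc)
end
```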
data/lib/backup-agent/version.rb ADDED
@@ -0,0 +1,6 @@
+ # encoding: UTF-8
+ # frozen_string_literal: true
+
+ module Backup
+ VERSION = "2.0.0"
+ end
data/test.rb ADDED
@@ -0,0 +1,20 @@
+ # encoding: UTF-8
+ # frozen_string_literal: true
+
+ require File.expand_path('../lib/backup-agent', __FILE__)
+
+ credentials do
+ mysql localhost: { user: 'root', password: 'root' }
+ amazon_s3 default: { access_key_id: 'AKIAIJDU5HT7XT6HRINA', secret_access_key: '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' }
+ end
+
+ storages do
+ amazon_s3 default: { region: 'eu-central-1', bucket: 'perseidev-backups' }
+ end
+
+ backup to: { amazon_s3: :default, local: :default } do
+ mysql host: '127.0.0.1', credentials: :localhost, databases: :all
+ Dir['/var/www/*/current/public'].each { |dir| directory dir }
+ end
+
+ delete_backups_older_than 30.days
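Note that this script sends backups to `local: :default` as well, but registers only an Amazon S3 storage; with the registry code above, the `storages(local: :default)` lookup raises a `KeyError` unless a local storage is registered too, for example (directory path is illustrative):

```ruby
storages do
  local default: { directory: '/var/backups' }
end
```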
metadata CHANGED
@@ -1,80 +1,86 @@
  --- !ruby/object:Gem::Specification
  name: backup-agent
  version: !ruby/object:Gem::Version
- version: 1.0.9
+ version: 2.0.0
  platform: ruby
  authors:
  - Yaroslav Konoplov
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2016-11-29 00:00:00.000000000 Z
+ date: 2017-04-23 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: aws-sdk
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: '0'
+ version: '2'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: '0'
+ version: '2'
  - !ruby/object:Gem::Dependency
- name: confo-config
+ name: activesupport
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 1.0.0
+ version: '3.0'
+ - - "<"
+ - !ruby/object:Gem::Version
+ version: '6.0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 1.0.0
+ version: '3.0'
+ - - "<"
+ - !ruby/object:Gem::Version
+ version: '6.0'
  - !ruby/object:Gem::Dependency
- name: activesupport
+ name: method-not-implemented
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: 3.2.0
+ version: '1.0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - "~>"
  - !ruby/object:Gem::Version
- version: 3.2.0
+ version: '1.0'
  description: Easy AWS S3 backup
  email:
- - yaroslav@inbox.com
+ - eahome00@gmail.com
  executables: []
  extensions: []
  extra_rdoc_files: []
  files:
  - ".gitignore"
- - Gemfile
- - LICENSE.txt
+ - LICENSE
  - README.md
- - Rakefile
  - backup-agent.gemspec
  - lib/backup-agent.rb
- - lib/backup-agent/abstract_storage.rb
- - lib/backup-agent/abstract_storage_config.rb
- - lib/backup-agent/abstract_storage_object.rb
- - lib/backup-agent/features.rb
+ - lib/backup-agent/credentials.rb
+ - lib/backup-agent/dsl.rb
  - lib/backup-agent/performer.rb
- - lib/backup-agent/s3_config.rb
- - lib/backup-agent/s3_object.rb
- - lib/backup-agent/s3_storage.rb
- - lib/backup-agent/task.rb
+ - lib/backup-agent/storages.rb
+ - lib/backup-agent/storages/amazon-s3.rb
+ - lib/backup-agent/storages/base.rb
+ - lib/backup-agent/storages/local.rb
+ - lib/backup-agent/tasks/directory.rb
+ - lib/backup-agent/tasks/mysql.rb
+ - lib/backup-agent/version.rb
+ - test.rb
  homepage: http://github.com/yivo/backup-agent
  licenses:
  - MIT
@@ -95,7 +101,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.5.1
+ rubygems_version: 2.6.8
  signing_key:
  specification_version: 4
  summary: Easy AWS S3 backup
data/Gemfile DELETED
@@ -1,4 +0,0 @@
- source 'https://rubygems.org'
-
- # Specify your gem's dependencies in confo.gemspec
- gemspec
data/Rakefile DELETED
@@ -1 +0,0 @@
- require 'bundler/gem_tasks'
data/lib/backup-agent/abstract_storage.rb DELETED
@@ -1,32 +0,0 @@
- module Backup
- class AbstractStorage
- include Enumerable
-
- attr_reader :config, :env
-
- def initialize(config, env = {})
- @config = config
- @env = env
- end
-
- def open
-
- end
-
- def close
-
- end
-
- def upload(key, path)
-
- end
-
- def delete(key)
-
- end
-
- def each
-
- end
- end
- end
data/lib/backup-agent/abstract_storage_config.rb DELETED
@@ -1,5 +0,0 @@
- module Backup
- class AbstractStorageConfig < Confo::Config
-
- end
- end
data/lib/backup-agent/abstract_storage_object.rb DELETED
@@ -1,19 +0,0 @@
- module Backup
- class AbstractStorageObject
- attr_reader :storage, :key, :env
-
- def initialize(storage, key, env = {})
- @storage = storage
- @key = key
- @env = env
- end
-
- def last_modified
-
- end
-
- def delete
-
- end
- end
- end
data/lib/backup-agent/features.rb DELETED
@@ -1,30 +0,0 @@
- module Backup
- class Features
- def initialize
- check_mysql
- check_mongodb
- end
-
- def check_mysql
- if @mysql_check.nil?
- @mysql_check = system('/usr/bin/env mysql --version') ? true : (puts('MySQL is not installed'); false)
- end
- @mysql_check
- end
-
- def check_mongodb
- if @mongodb_check.nil?
- @mongodb_check = system('/usr/bin/env mongod --version') ? true : (puts('MongoDB is not installed'))
- end
- @mongodb_check
- end
-
- def mysql_installed?
- !!@mysql_check
- end
-
- def mongodb_installed?
- !!@mongodb_check
- end
- end
- end
data/lib/backup-agent/s3_config.rb DELETED
@@ -1,11 +0,0 @@
- module Backup
- class S3Config < AbstractStorageConfig
- def initialize(*)
- set :access_key_id, nil
- set :secret_access_key, nil
- set :region, nil
- set :bucket, nil
- super
- end
- end
- end
data/lib/backup-agent/s3_object.rb DELETED
@@ -1,17 +0,0 @@
- module Backup
- class S3Object < AbstractStorageObject
- def initialize(*)
- super
- @object = env.fetch(:object)
- @bucket = env.fetch(:bucket)
- end
-
- def last_modified
- @object.last_modified
- end
-
- def delete
- storage.delete(key)
- end
- end
- end
data/lib/backup-agent/s3_storage.rb DELETED
@@ -1,45 +0,0 @@
- module Backup
- class S3Storage < AbstractStorage
-
- def initialize(*)
- super
- @bucket_name = env.fetch(:bucket)
- end
-
- def s3
- @s3 ||= begin
- Aws.config.update(
- region: config.region,
- credentials: Aws::Credentials.new(config.access_key_id, config.secret_access_key)
- )
- Aws::S3::Resource.new
- end
- end
-
- def bucket
- s3.bucket(@bucket_name)
- end
-
- def open
- s3
- end
-
- def upload(key, path)
- bucket.object(key).upload_file(path)
- end
-
- def delete(key)
- bucket.object(key).delete
- end
-
- def object(key)
- S3Object.new(self, key, object: bucket.object(key), bucket: bucket)
- end
-
- def each
- bucket.objects.each do |s3_obj|
- yield S3Object.new(self, s3_obj.key, object: s3_obj, bucket: bucket)
- end
- end
- end
- end
data/lib/backup-agent/task.rb DELETED
@@ -1,66 +0,0 @@
- module Backup
- class Task < Confo::Config
- def initialize(*)
- set :mysql_user, 'root'
- set :mysql_password, 'root'
- set :mysql_host, 'localhost'
- set :mysql_databases, -> do
- `/usr/bin/env mysql #{get(:mysql_connect)} -e "SHOW DATABASES;"`
- .split("\n")
- .reject { |el| el =~ /Database|information_schema|mysql|performance_schema|test|phpmyadmin/ }
- end
-
- set :mysqldump_options, %w(
- --add-drop-database
- --add-drop-table
- --add-locks
- --allow-keywords
- --comments
- --complete-insert
- --create-options
- --debug-check
- --debug-info
- --extended-insert
- --flush-privileges
- --insert-ignore
- --lock-tables
- --quick
- --quote-names
- --set-charset
- --dump-date
- --secure-auth
- --tz-utc
- --disable-keys )
-
- set :mysql_connect, -> do
- pass = get(:mysql_password)
- pass_param = pass && !pass.empty? ? "--password=#{pass}" : ''
- "--user #{get(:mysql_user)} #{pass_param} --host=#{get(:mysql_host)}"
- end
-
- set :mongo_databases, -> do
- if `/usr/bin/env mongo --eval "db.getMongo().getDBNames()"` =~ /connecting to: (.*)/m
- $1.split(/[\n,]/).reject(&:empty?)
- else
- []
- end
- end
-
- set :mongo_host, 'localhost'
- set :mongo_connect, -> { "-h #{get(:mongo_host)}" }
-
- set :directories, -> {
- Dir['/var/www/*'].each_with_object({}) do |el, memo|
- if Dir.exists?(File.join(el, 'current/public/uploads'))
- memo["Uploads #{File.basename(el)}"] = File.join(el, 'current/public/uploads')
- end
- end
- }
-
- set :files, {}
-
- set :days_to_keep_backups, 30
- super
- end
- end
- end