bibliotech 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/bin/bibliotech +5 -0
- data/doc/example_config_file.yml +58 -0
- data/doc/todo.txt +19 -0
- data/lib/bibliotech/application.rb +95 -0
- data/lib/bibliotech/backups/file_record.rb +16 -0
- data/lib/bibliotech/backups/prune_list.rb +58 -0
- data/lib/bibliotech/backups/pruner.rb +71 -0
- data/lib/bibliotech/backups/scheduler.rb +49 -0
- data/lib/bibliotech/builders/database.rb +25 -0
- data/lib/bibliotech/builders/file.rb +75 -0
- data/lib/bibliotech/builders/gzip.rb +51 -0
- data/lib/bibliotech/builders/mysql.rb +35 -0
- data/lib/bibliotech/builders/postgres.rb +37 -0
- data/lib/bibliotech/builders.rb +43 -0
- data/lib/bibliotech/cli.rb +24 -0
- data/lib/bibliotech/command_generator.rb +86 -0
- data/lib/bibliotech/command_runner.rb +36 -0
- data/lib/bibliotech/compression/bzip2.rb +6 -0
- data/lib/bibliotech/compression/gzip.rb +6 -0
- data/lib/bibliotech/compression/sevenzip.rb +5 -0
- data/lib/bibliotech/compression.rb +35 -0
- data/lib/bibliotech/config.rb +269 -0
- data/lib/bibliotech/rake_lib.rb +82 -0
- data/lib/bibliotech.rb +7 -0
- data/spec/bibliotech/backup_pruner_spec.rb +58 -0
- data/spec/bibliotech/backup_scheduler_spec.rb +108 -0
- data/spec/bibliotech/command_generator/mysql_spec.rb +170 -0
- data/spec/bibliotech/command_generator/postgres_spec.rb +180 -0
- data/spec/bibliotech/command_generator_spec.rb +99 -0
- data/spec/bibliotech/command_runner_spec.rb +50 -0
- data/spec/bibliotech/compression/bunzip2_spec.rb +9 -0
- data/spec/bibliotech/compression/bzip2_spec.rb +9 -0
- data/spec/bibliotech/compression/gzip_spec.rb +9 -0
- data/spec/bibliotech/compression/sevenzip_spec.rb +9 -0
- data/spec/bibliotech/compression_spec.rb +28 -0
- data/spec/bibliotech/config_spec.rb +151 -0
- data/spec/gem_test_suite.rb +0 -0
- data/spec/spec_helper.rb +2 -0
- metadata +150 -0
data/lib/bibliotech/command_generator.rb
ADDED
@@ -0,0 +1,86 @@
require 'caliph'

require 'bibliotech/builders/gzip'
require 'bibliotech/builders/postgres'
require 'bibliotech/builders/mysql'

module BiblioTech
  class CommandGenerator

    include Caliph::CommandLineDSL

    attr_accessor :config

    def initialize(config)
      @config = config
    end

    def export(options = nil)
      options = config.merge(options || {})
      command = cmd
      command = Builders::Export.for(options).go(command)
      Builders::FileOutput.for(options).go(command)
    end

    def import(options = nil)
      options = config.merge(options || {})
      command = cmd()
      command = Builders::Import.for(options).go(command)
      Builders::FileInput.for(options).go(command)
    end

    def fetch(remote, filename, options = nil)
      options = config.merge(options || {})
      cmd("scp") do |cmd|
        options.optionally{ cmd.options << "-i #{options.id_file(remote)}" }
        cmd.options << options.remote_file(remote, filename)
        cmd.options << options.local_file(filename)
      end
    end

    def push(remote, filename, options = nil)
      options = config.merge(options || {})
      cmd("scp") do |cmd|
        cmd.options << options.local_file(filename)
        cmd.options << options.remote_file(remote, filename)
      end
    end

    def remote_cli(remote, *command_options)
      options = {}
      if command_options.last.is_a? Hash
        options = command_options.pop
      end
      options = config.merge(options)
      command_on_remote = cmd("cd") do |cmd|
        cmd.options << options.root_dir_on(remote)
      end & cmd("bundle", "exec", "bibliotech", *command_options)
      cmd("ssh") do |cmd|
        cmd.options << "-n" # because we're not going to be doing any input
        options.optionally{ cmd.options << "-i #{options.id_file(remote)}" }
        options.optionally{ cmd.options << "-l #{options.remote_user(remote)}" }

        cmd.options << options.remote_host(remote)

        options.optionally{ cmd.options << "-p #{options.remote_port(remote)}" } #ok

        options.optionally do
          options.ssh_options(remote).each do |opt|
            cmd.options << "-o #{opt}"
          end
        end
      end - escaped_command(command_on_remote)
    end

    def wipe()
      raise NotImplementedError
    end
    def delete()
      raise NotImplementedError
    end
    def create()
      raise NotImplementedError
    end
  end
end
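A note on usage: export, import, fetch, and push all return Caliph command objects rather than executing anything; CommandRunner (next file) is what actually runs them. The sketch below is illustrative only -- the config hash layout is an assumption (the gem ships its own sample in data/doc/example_config_file.yml); only the CommandGenerator API itself comes from this diff.

# Illustrative sketch, not from the gem: the config layout here is assumed.
require 'bibliotech/config'
require 'bibliotech/command_generator'

config = BiblioTech::Config.new(nil)   # valise unused here; we assign the hash directly
config.hash = {
  "path"            => "/var/www/myapp/current",
  "database_config" => { "adapter" => "postgres", "database" => "myapp_production", "username" => "deploy" },
  "backups"         => { "dir" => "db_backups", "prefix" => "backup", "compress" => "gzip" },
  "remotes"         => { "production" => { "host" => "db.example.com", "user" => "deploy",
                                           "path" => "/var/www/myapp/current" } },
}

generator  = BiblioTech::CommandGenerator.new(config)
export_cmd = generator.export   # Caliph command: dump the configured database to the backup file (not yet run)
fetch_cmd  = generator.fetch("production", "backup-2014-08-12_00:00.sql.gz")  # scp the named backup down from the remote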
data/lib/bibliotech/command_runner.rb
ADDED
@@ -0,0 +1,36 @@
module BiblioTech
  class CommandRunner

    attr_reader :generator
    attr_accessor :shell

    def initialize(config)
      @config = config
      @generator = CommandGenerator.for(config.db_config)
      @shell = Caliph.new
    end

    def export(filepath)
      run decorate_for_compression(generator, filepath).export(filepath)
    end

    def import(filepath)
      run decorate_for_compression(generator, filepath).import(filepath)
    end

    #def wipe()
    #  tables = system(CommandGenerator.new.fetch_tables(@config))
    #  filter_tables_for_wipeable(tables)
    #  system(CommandGenerator.new.wipe_tables(@config,tables))
    #end
    def run(command)
      @shell.run(command)
    end

    private
    def decorate_for_compression(generator, filepath)
      Compression.for(filepath, generator)
    end

  end
end
data/lib/bibliotech/compression.rb
ADDED
@@ -0,0 +1,35 @@
module BiblioTech
  class Compression

    class << self
      def register(adapter_pattern, klass)
        Compression.registry[adapter_pattern] = klass
      end

      def registry
        @adapter_registry ||= {}
      end

      def supported_adapters
        @adapter_registry.keys
      end

      def for(filepath, generator)
        _, klass = @adapter_registry.find{ |pattern, klass|
          filepath =~ pattern
        }
        return generator if klass.nil?
        klass.new(generator)
      end
    end

    def initialize(generator)
      @generator = generator
    end

  end
end

require 'bibliotech/compression/gzip'
require 'bibliotech/compression/sevenzip'
require 'bibliotech/compression/bzip2'
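This class-method registry is how compression decorators hook in: Compression.for matches the target filename against each registered pattern and, on a match, wraps the command generator in the matching decorator; otherwise it returns the generator untouched. The requires at the bottom suggest the bundled gzip, sevenzip, and bzip2 classes register themselves this way. A hypothetical third-party decorator would look roughly like this (the Xz class and its pattern are invented for illustration; only register and for come from the diff):

# Hypothetical example -- Xz is not part of the gem.
require 'bibliotech/compression'

module BiblioTech
  class Compression
    class Xz < Compression
      register(/\.xz\z/, self)   # filenames ending in .xz get wrapped by this class
    end
  end
end

# Compression.for("backup.sql.xz", generator)  # => Compression::Xz wrapping the generator
# Compression.for("backup.sql",    generator)  # => the bare generator (no pattern matched)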
data/lib/bibliotech/config.rb
ADDED
@@ -0,0 +1,269 @@
module BiblioTech
  class Config
    class MissingConfig < KeyError; end

    CONFIG_STEPS = {
      :database_config_file => [ "database_config_file" ],
      :database_config_env  => [ "database_config_env" ],
      :host                 => [ "host" ],
      :port                 => [ "port" ],
      :user                 => [ "user" ],
      :rsa_files            => [ "rsa_files" ],
      :ssh_options          => [ "ssh_options" ],
      :file                 => [ "backups", "file" ],
      :filename             => [ "backups", "filename" ],
      :backup_path          => [ "backups", "dir" ],
      :root_path            => [ "path" ],
      :fetch_dir            => [ "fetched_dir" ],
      :compressor           => [ "backups", "compress" ],
      :prune_schedule       => [ "backups", "keep" ],
      :backup_name          => [ "backups", "prefix" ],
      :backup_frequency     => [ "backups", "frequency" ],
      :db_adapter           => [ "database_config", "adapter" ],
      :db_host              => [ "database_config", "host" ],
      :db_port              => [ "database_config", "port" ],
      :db_database          => [ "database_config", "database" ],
      :db_username          => [ "database_config", "username" ],
      :db_password          => [ "database_config", "password" ],
    }

    def initialize(valise)
      @valise = valise
    end

    attr_reader :valise
    attr_writer :hash

    def hash
      @hash ||= stringify_keys(valise.contents("config.yaml"))
    end

    def stringify_keys(hash) # sym -> string
      hash.keys.each do |key|
        if key.is_a?(Symbol)
          hash[key.to_s] = hash.delete(key)
        end
        if hash[key.to_s].is_a?(Hash)
          hash[key.to_s] = stringify_keys(hash[key.to_s])
        end
      end
      hash
    end

    def merge(other_hash)
      self.class.new(valise).tap do |newbie|
        newbie.hash = hash.merge(stringify_keys(other_hash))
      end
    end

    def steps_for(key)
      CONFIG_STEPS.fetch(key)
    end

    def optional(&block)
      yield
    rescue MissingConfig
    end
    alias optionally optional

    def extract(*steps_chain)
      steps_chain.each do |steps|
        begin
          return steps.inject(hash) do |hash, step|
            raise MissingConfig if hash.nil?
            hash.fetch(step)
          end
        rescue KeyError
        end
      end
      raise MissingConfig, "No value configured at any of: #{steps_chain.map{|steps| steps.join(">")}}"
    end

    def local
      extract(["local"])
    end

    def remote
      extract(["remote"])
    end

    def local_get(key)
      steps = steps_for(key)
      steps_chain =
        begin
          [steps, [local] + steps]
        rescue MissingConfig
          [steps]
        end
      extract(*steps_chain)
    end

    def remote_get(remote_name, key)
      steps = [remote_name] + steps_for(key)
      extract(steps, ["remotes"] + steps)
    end

    def ssh_options(for_remote)
      steps = steps_for(:ssh_options) + [for_remote]
      steps_chain =
        begin
          [steps, [local] + steps]
        rescue MissingConfig
          [steps]
        end
      extract(steps_chain)
    end

    def id_file(for_remote)
      steps = steps_for(:rsa_files) + [for_remote]
      steps_chain =
        begin
          [steps, [local] + steps]
        rescue MissingConfig
          [steps]
        end
      extract(steps_chain)
    end

    def local_path
      local_get(:fetch_dir)
    rescue MissingConfig
      local_get(:root_path)
    end

    def local_file(filename)
      File::join(local_path, filename)
    end

    def root_dir_on(remote)
      remote_get(remote, :root_path)
    end

    def remote_host(remote)
      remote_get(remote, :host)
    end

    def remote_port(remote)
      remote_get(remote, :port)
    end

    def remote_user(remote)
      remote_get(remote, :user)
    end

    def remote_path(remote)
      path = "#{remote_host(remote)}:#{root_dir_on(remote)}"
      begin
        "#{remote_user(remote)}@#{path}"
      rescue MissingConfig
        path
      end
    end

    def remote_file(remote, filename)
      File::join(remote_path(remote), filename)
    end

    SCHEDULE_SHORTHANDS = {
      "hourly"      => 60,
      "hourlies"    => 60,
      "daily"       => 60 * 24,
      "dailies"     => 60 * 24,
      "weekly"      => 60 * 24 * 7,
      "weeklies"    => 60 * 24 * 7,
      "monthly"     => 60 * 24 * 30,
      "monthlies"   => 60 * 24 * 30,
      "quarterly"   => 60 * 24 * 120,
      "quarterlies" => 60 * 24 * 120,
      "yearly"      => 60 * 24 * 365,
      "yearlies"    => 60 * 24 * 365,
    }
    def regularize_frequency(frequency)
      Integer( SCHEDULE_SHORTHANDS.fetch(frequency){ frequency } )
    rescue ArgumentError
      raise "#{frequency.inspect} is neither a number of minutes or a shorthand. Try:\n #{SCHEDULE_SHORTHANDS.keys.join(" ")}"
    end

    def backup_name
      local_get(:backup_name)
    end

    def backup_frequency
      @backup_frequency ||= regularize_frequency(local_get(:backup_frequency))
    end

    def each_prune_schedule
      local_get(:prune_schedule).each do |frequency, limit|
        real_frequency = regularize_frequency(frequency)
        unless real_frequency % backup_frequency == 0
          raise "Pruning frequency #{real_frequency}:#{frequency} is not a multiple of backup frequency: #{backup_frequency}:#{local_get(:backup_frequency)}"
        end
        yield(real_frequency, limit)
      end
    end

    def database_config
      hash["database_config"] ||= valise.contents(local_get(:database_config_file))[local_get(:database_config_env)]
    end

    #@group File management
    def backup_file
      local_get(:file)
    rescue MissingConfig
      ::File.join(backup_path, filename)
    end

    def filename
      local_get(:filename)
    end

    def backup_path
      local_get(:backup_path)
    end

    def expander
      if remote.nil?
        local_get(:expander)
      else
        remote_get(remote, :expander)
      end
    end

    def compressor
      local_get(:compressor)
    end
    #@endgroup

    #@group Database
    def adapter
      database_config
      local_get(:db_adapter)
    end

    def host
      database_config
      local_get(:db_host)
    end

    def port
      database_config
      local_get(:db_port)
    end

    def username
      database_config
      local_get(:db_username)
    end

    def database
      database_config
      local_get(:db_database)
    end

    def password
      database_config
      local_get(:db_password)
    end
    #@endgroup
  end
end
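The net effect of CONFIG_STEPS plus extract/local_get is a two-level lookup: each setting lives at a fixed path in config.yaml, and the same path can also be nested under the environment named by the top-level "local" key. A rough sketch of that behaviour follows; the hash below is invented for illustration (the gem's own sample is data/doc/example_config_file.yml), and only the method names come from the diff.

# Illustrative only -- the exact config layout here is an assumption.
require 'bibliotech/config'

config = BiblioTech::Config.new(nil)   # valise unused here; we assign the hash directly
config.hash = {
  "local" => "development",
  "development" => {
    "backups" => { "dir" => "db_backups", "prefix" => "backup", "frequency" => "daily",
                   "keep" => { "dailies" => 7, "weeklies" => 4 } },
  },
}

config.backup_path        # => "db_backups"  (resolved via ["development"]["backups"]["dir"])
config.backup_frequency   # => 1440          ("daily" regularized to minutes)
config.each_prune_schedule do |minutes, limit|
  # yields [1440, 7] then [10080, 4]; raises if a prune period isn't a multiple of backup_frequency
end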
data/lib/bibliotech/rake_lib.rb
ADDED
@@ -0,0 +1,82 @@
require 'mattock'
require 'bibliotech/application'

module BiblioTech
  class Tasklib < ::Mattock::Tasklib
    setting(:app)
    setting(:config_path)
    setting(:local, nil)
    setting(:remote, nil)

    def default_configuration
      super
      self.app = App.new

      self.config_path = app.config_path
      from_hash(app.config.hash)
      @default_state = to_hash
      @default_state.delete(:app)
      @default_state.delete(:config_path)
    end

    def resolve_configuration
      configured_state = to_hash
      configured_state.delete(:app)
      configured_state.delete(:config_path)
      case [config_path == app.config_path, configured_state == to_hash.delete(:config_path)]
      when [false, false]
      when [true, true]
        raise "Cannot both change to config path and any other setting (sorry) - put configs in a file"
      when [true, false]
        app.config.hash.merge!(configured_state)
      when [false, true]
        app.config_path = config_path
        app.reset
      end
      super
    end

    default_namespace :bibliotech

    def define
      in_namespace do
        namespace :backups do
          task :restore, [:name] do |task, args|
            fail ":name is required" if args[:name].nil?
            options = { :backups => { :filename => args[:name] } }
            if %r[/] =~ args[:name]
              options = { :backups => { :file => args[:name] } }
            end
            app.import(options)
          end

          task :create, [:prefix] do |task, args|
            fail ":prefix is required" if args[:prefix].nil?
            app.create_backup( :backups => { :prefix => args[:prefix] } )
          end

          task :clean, [:prefix] do |task, args|
            fail ":prefix is required" if args[:prefix].nil?
            app.prune( :backups => { :prefix => args[:prefix] } )
          end

          task :perform, [:prefix] => [:create, :clean]
        end

        namespace :remote_sync do
          task :down do
            filename = app.remote_cli(remote, "latest")
            app.get(remote, filename)
            app.import(:backups => { :filename => filename})
          end

          task :up do
            filename = app.latest
            app.send(remote, filename)
            app.remote_cli(remote, "load", filename)
          end
        end
      end
    end
  end
end
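Once the tasklib has been instantiated in a Rakefile (the Mattock setup itself is not shown in this diff), the tasks it defines follow directly from the namespaces and task declarations above, e.g. rake bibliotech:backups:create[nightly] from the shell. A hypothetical programmatic equivalent, with placeholder prefix and filename arguments:

# Hypothetical invocations; assumes BiblioTech::Tasklib has already been set up in the Rakefile.
Rake::Task["bibliotech:backups:create"].invoke("nightly")    # dump a backup with prefix "nightly"
Rake::Task["bibliotech:backups:clean"].invoke("nightly")     # prune old "nightly" backups per the keep schedule
Rake::Task["bibliotech:backups:restore"].invoke("nightly-2014-08-12_00:00.sql.gz")  # load a named dump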
data/spec/bibliotech/backup_pruner_spec.rb
ADDED
@@ -0,0 +1,58 @@
require 'bibliotech/application'
require 'bibliotech/backups/pruner'
require 'file-sandbox'
module BiblioTech
  describe Backups::Pruner do
    include FileSandbox

    before :each do
      sandbox.new :directory => "db_backups"
      sandbox.new :file => "db_backups/backup-2014-08-12_00:00.sql.7z"
    end

    let :app do
      App.new
    end

    it "should something latest" do
      expect(app.latest("local" => "production")).to eql "db_backups/backup-2014-08-12_00:00.sql.7z"
    end

  end

  describe Backups::PruneList do
    subject :pruner do
      Backups::PruneList.new("/some/path/for/files", "testing")
    end

    it "should warn when other files are present" do
      expect(pruner).to receive(:warn)
      pruner.build_record("some.random.file")
    end

    it "should fail when correct prefix doesn't match timestamp" do
      expect do
        pruner.build_record("testing-WACKYTIMESTAMP.sql.gz")
      end.to raise_error
    end

    describe "creating filenames" do
      it "should match filenames it creates" do
        time = Time.new(2014, 7, 30, 3, 14, 37, 0)
        record = pruner.build_record(Backups::PruneList.filename_for("testing", time))
        expect(record.timestamp).to be_within(60).of(time)
      end
    end

    describe "producing a record" do
      subject :record do
        pruner.build_record("testing-2014-07-30_03:14.sql.gz")
      end

      it { is_expected.to be_a(Backups::FileRecord) }
      it "should have a good time" do
        expect(record.timestamp).to eql Time.new(2014, 7, 30, 3, 14, 0, 0)
      end
    end
  end
end
data/spec/bibliotech/backup_scheduler_spec.rb
ADDED
@@ -0,0 +1,108 @@
require 'bibliotech/backups/scheduler'

module BiblioTech::Backups
  describe Scheduler do
    let(:test_jitter){ 0 }

    let :unfiltered_files do
      (0..interval).step(frequency).map do |seconds| # every 15 seconds for 8 hours
        seconds = seconds - test_jitter/2 + rand(test_jitter)
        FileRecord.new("", Time.now - seconds)
      end
    end

    let :filtered_files do
      scheduler.mark(unfiltered_files)
    end

    let :kept_files do
      filtered_files.select do |record|
        record.keep?
      end
    end

    describe "without a limit" do
      let :scheduler do
        Scheduler.new(60, nil)
      end

      context "when there's more than enough backups" do
        let(:interval){ 60*60*12 - 1}
        let(:frequency) { 15 }
        let(:test_jitter){ 60 }

        it "should mark 8 files kept" do
          expect(kept_files.count).to eql 13
        end
      end
    end

    describe "with a limit" do
      let :scheduler do
        Scheduler.new(60, 8)
      end

      context "when there's just enough backups" do
        let(:interval){ 60*60*8 - 1 }
        let(:frequency){ 60*8 }
        let(:test_jitter){ 60 }

        it "should mark 8 files kept" do
          expect(kept_files.count).to eql 8
        end

        context "even if we're pruning much later" do
          let :filtered_files do
            unfiltered_files.each do |record|
              record.timestamp += 60*60*24
            end

            scheduler.mark(unfiltered_files)
          end

          it "should mark 8 files kept" do
            expect(kept_files.count).to eql 8
          end
        end
      end

      context "when there's more than enough backups" do
        let(:interval){ 60*60*12 }
        let(:frequency) { 15 }
        let(:test_jitter){ 60 }

        it "should mark 8 files kept" do
          expect(kept_files.count).to eql 8
        end
      end

      context "when there are too few backups" do
        let(:interval){ 60*60*4 - 1 }
        let(:frequency){ 60*8 }
        let(:test_jitter){ 60 }

        it "should mark 8 files kept" do
          expect(kept_files.count).to eql 5
        end
      end

      context "when files already marked to keep" do
        let :filtered_files do
          unfiltered_files.each do |record|
            record.keep = true
          end

          scheduler.mark(unfiltered_files)
        end

        let(:interval){ 60*60*12 }
        let(:frequency) { 15 }
        let(:test_jitter){ 60 }

        it "should not unmark any" do
          expect(kept_files.length).to eql(unfiltered_files.length)
        end
      end
    end
  end
end