mongo-oplog-backup 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +19 -0
- data/.rspec +2 -0
- data/.ruby-version +1 -0
- data/.travis.yml +11 -0
- data/Gemfile +4 -0
- data/LICENSE.txt +22 -0
- data/README.md +34 -0
- data/Rakefile +6 -0
- data/bin/mongo-oplog-backup +61 -0
- data/lib/mongo_oplog_backup.rb +21 -0
- data/lib/mongo_oplog_backup/backup.rb +148 -0
- data/lib/mongo_oplog_backup/config.rb +47 -0
- data/lib/mongo_oplog_backup/ext/enumerable.rb +17 -0
- data/lib/mongo_oplog_backup/ext/timestamp.rb +34 -0
- data/lib/mongo_oplog_backup/oplog.rb +110 -0
- data/lib/mongo_oplog_backup/version.rb +3 -0
- data/mongo-oplog-backup.gemspec +28 -0
- data/oplog-last-timestamp.js +11 -0
- data/spec/backup_spec.rb +55 -0
- data/spec/enumerable_spec.rb +21 -0
- data/spec/fixtures/oplog-1408088734:1-1408088740:1.bson +0 -0
- data/spec/fixtures/oplog-1408088740:1-1408088810:1.bson +0 -0
- data/spec/fixtures/oplog-1408088810:1-1408088928:1.bson +0 -0
- data/spec/fixtures/oplog-merged.bson +0 -0
- data/spec/oplog_spec.rb +58 -0
- data/spec/spec_helper.rb +54 -0
- data/spec/timestamp_spec.rb +49 -0
- metadata +165 -0
checksums.yaml
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
---
|
|
2
|
+
SHA1:
|
|
3
|
+
metadata.gz: 3acf779f860fdb9a7fc941b68f7755c36f215ad6
|
|
4
|
+
data.tar.gz: 5a1a19a402c4d337b90c81efc693aeb7d583e454
|
|
5
|
+
SHA512:
|
|
6
|
+
metadata.gz: 5c01fe3144e8a2c891c363bac256082c17f836d71fa2b097d8f3e737afcaba51d41034b6c7dda7025ac5d0ee282a35908796441bbb9bf41cafc0aa89ac9e35af
|
|
7
|
+
data.tar.gz: 94b42dac6b402b476c83ff6f1f131884d266d2b8bc545b389440574fc36f084bcbd7df9152d6149ee1d2f9a2c37e47864822d795c0d619109c93a3de2d1a17e1
|
data/.gitignore
ADDED
data/.rspec
ADDED
data/.ruby-version
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
2.1.1
|
data/.travis.yml
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
language: ruby
|
|
2
|
+
rvm:
|
|
3
|
+
- 2.1.1
|
|
4
|
+
before_script:
|
|
5
|
+
- mkdir testdb
|
|
6
|
+
- mongod --port 27017 --dbpath testdb --replSet rs0 --oplogSize 20 --noprealloc --fork --smallfiles --logpath mongodb.log
|
|
7
|
+
- sleep 3
|
|
8
|
+
- mongo admin --eval 'printjson(rs.initiate());'
|
|
9
|
+
- sleep 20
|
|
10
|
+
script:
|
|
11
|
+
- bundle exec rspec
|
data/Gemfile
ADDED
data/LICENSE.txt
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
Copyright (c) 2014 Ralf Kistner
|
|
2
|
+
|
|
3
|
+
MIT License
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining
|
|
6
|
+
a copy of this software and associated documentation files (the
|
|
7
|
+
"Software"), to deal in the Software without restriction, including
|
|
8
|
+
without limitation the rights to use, copy, modify, merge, publish,
|
|
9
|
+
distribute, sublicense, and/or sell copies of the Software, and to
|
|
10
|
+
permit persons to whom the Software is furnished to do so, subject to
|
|
11
|
+
the following conditions:
|
|
12
|
+
|
|
13
|
+
The above copyright notice and this permission notice shall be
|
|
14
|
+
included in all copies or substantial portions of the Software.
|
|
15
|
+
|
|
16
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
17
|
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
|
18
|
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
19
|
+
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
|
20
|
+
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
|
21
|
+
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
|
22
|
+
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
data/README.md
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
# MongoOplogBackup
|
|
2
|
+
|
|
3
|
+
**Experimental** incremental backup system for MongoDB based on the oplog.
|
|
4
|
+
|
|
5
|
+
Not ready for any important data yet.
|
|
6
|
+
|
|
7
|
+
## Installation
|
|
8
|
+
|
|
9
|
+
git clone git@github.com:journeyapps/mongo-oplog-backup.git
|
|
10
|
+
cd mongo-oplog-backup
|
|
11
|
+
rake install
|
|
12
|
+
|
|
13
|
+
## Usage
|
|
14
|
+
|
|
15
|
+
mongo-oplog-backup backup --dir mybackup
|
|
16
|
+
|
|
17
|
+
TODO: Write usage instructions here
|
|
18
|
+
|
|
19
|
+
## Backup structure
|
|
20
|
+
|
|
21
|
+
* `backup.json` - Stores the current state (oplog timestamp and backup folder).
|
|
22
|
+
The only file required to perform incremental backups. It is not used for restoring a backup.
|
|
23
|
+
* `backup-<timestamp>` - The current backup folder.
|
|
24
|
+
* `dump` - a full mongodump
|
|
25
|
+
* `oplog-<start>-<end>.bson` - The oplog from the start timestamp until the end timestamp (inclusive).
|
|
26
|
+
|
|
27
|
+
Each time a full backup is performed, a new backup folder is created.
|
|
28
|
+
## Contributing
|
|
29
|
+
|
|
30
|
+
1. Fork it ( http://github.com/<my-github-username>/mongo-oplog-backup/fork )
|
|
31
|
+
2. Create your feature branch (`git checkout -b my-new-feature`)
|
|
32
|
+
3. Commit your changes (`git commit -am 'Add some feature'`)
|
|
33
|
+
4. Push to the branch (`git push origin my-new-feature`)
|
|
34
|
+
5. Create new Pull Request
|
data/Rakefile
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
#!/usr/bin/env ruby
# Command-line entry point for mongo-oplog-backup.
# Two subcommands:
#   backup - perform a full or incremental (oplog) backup
#   merge  - merge oplog segments into a dump folder for mongorestore

require 'mongo_oplog_backup'
require 'slop'

# Slop v3 DSL: strict mode rejects unknown flags; help adds --help.
opts = Slop.parse(help: true, strict: true) do
  banner 'Usage: mongo-oplog-backup [options]'

  command 'backup' do
    banner 'Usage: mongo-oplog-backup backup [options]'
    on :v, :verbose, 'Enable verbose mode'
    on :d, :dir, "Directory to store backup files. Defaults to 'backup'.", argument: :required
    on :full, 'Force full backup'
    on :oplog, 'Force oplog backup'

    on :ssl, "Connect to a mongod instance over an SSL connection"
    on :host, "Specifies a resolvable hostname for the mongod that you wish to backup.", default: 'localhost', argument: :required
    on :port, "Specifies the port that mongod is running on", default: '27017', argument: :required
    on :u, :username, "Specifies a username to authenticate to the MongoDB instance, if your database requires authentication. Use in conjunction with the --password option to supply a password.", argument: :required
    on :p, :password, "Specifies a password to authenticate to the MongoDB instance. Use in conjunction with the --username option to supply a username. Note. the password will not be prompted for, so must be passed as an argument", argument: :required

    run do |opts, args|
      dir = opts[:dir] || 'backup'
      # Collect connection/output options for Config.
      config_opts = {
        dir: dir,
        ssl: opts.ssl?
      }
      config_opts[:host] = opts[:host]
      config_opts[:port] = opts[:port]
      config_opts[:username] = opts[:username]
      config_opts[:password] = opts[:password]

      # :auto lets Backup#perform decide (full when no saved state, else oplog);
      # --full takes precedence over --oplog when both are given.
      mode = :auto
      if opts.full?
        mode = :full
      elsif opts.oplog?
        mode = :oplog
      end
      config = MongoOplogBackup::Config.new(config_opts)
      backup = MongoOplogBackup::Backup.new(config)
      backup.perform(mode)
    end
  end

  command 'merge' do
    banner 'Usage: mongo-oplog-backup merge [options]'
    on :v, :verbose, 'Enable verbose mode'
    on :d, :dir, "Directory containing the backup to restore. Must contain a 'dump' folder.", argument: :required

    run do |opts, args|
      dir = opts[:dir]
      raise ArgumentError, 'dir must be specified' unless dir
      raise ArgumentError, 'dir must contain a dump subfolder' unless File.directory?(File.join(dir, 'dump'))

      # Produces <dir>/dump/oplog.bson from the oplog-*.bson segments.
      MongoOplogBackup::Oplog.merge_backup(dir)
      puts
      puts "Restore the backup with: "
      puts "mongorestore [--drop] --oplogReplay #{File.join(dir, 'dump')}"
    end
  end
end
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
require 'logger'
|
|
2
|
+
|
|
3
|
+
require 'mongo_oplog_backup/version'
|
|
4
|
+
require 'mongo_oplog_backup/ext/enumerable'
|
|
5
|
+
require 'mongo_oplog_backup/ext/timestamp'
|
|
6
|
+
|
|
7
|
+
require 'mongo_oplog_backup/config'
|
|
8
|
+
require 'mongo_oplog_backup/backup'
|
|
9
|
+
require 'mongo_oplog_backup/oplog'
|
|
10
|
+
|
|
11
|
+
module MongoOplogBackup
  # Gem-wide logger. Stored in a class-instance variable instead of a
  # @@class variable (class variables are shared across the inheritance
  # tree and discouraged); the public interface is unchanged:
  # MongoOplogBackup.log and MongoOplogBackup.log= work exactly as before.
  class << self
    # @return [Logger] logger used by Backup/Config/Oplog. Reassign to
    #   redirect output (the spec suite points it at a log file).
    attr_accessor :log
  end

  # Default: log to STDOUT.
  self.log = Logger.new(STDOUT)
end
|
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
require 'json'
|
|
2
|
+
require 'fileutils'
|
|
3
|
+
require 'mongo_oplog_backup/oplog'
|
|
4
|
+
|
|
5
|
+
module MongoOplogBackup
  # Orchestrates MongoDB backups: a full mongodump, plus incremental dumps
  # of the oplog sections written since the last recorded position.
  class Backup
    attr_reader :config

    # config - a MongoOplogBackup::Config with connection options and paths.
    def initialize(config)
      @config = config
    end

    # Dumps oplog entries from options[:start] (inclusive) into the backup
    # folder named by options[:backup].
    #
    # Returns a hash with :entries (count), :first, :position (last
    # timestamp), :empty, and :file when new entries were written.
    # Raises when mongodump fails, the oplog is unordered, or the expected
    # starting timestamp is missing (oplog too small / rolled over).
    def backup_oplog(options={})
      start_at = options[:start]
      backup = options[:backup]
      raise ArgumentError, ":backup is required" unless backup
      raise ArgumentError, ":start is required" unless start_at

      if start_at
        # $gte: deliberately re-fetch the last saved entry so continuity
        # with the previous backup can be verified below.
        query = "--query \"{ts : { \\$gte : { \\$timestamp : { t : #{start_at.seconds}, i : #{start_at.increment} } } }}\""
      else
        query = ""
      end
      config.mongodump("--out #{config.oplog_dump_folder} --db local --collection oplog.rs #{query}")

      # File.exist? - File.exists? was deprecated and removed in Ruby 3.2.
      unless File.exist? config.oplog_dump
        raise "mongodump failed"
      end
      MongoOplogBackup.log.debug "Checking timestamps..."
      timestamps = Oplog.oplog_timestamps(config.oplog_dump)

      unless timestamps.increasing?
        raise "Something went wrong - oplog is not ordered."
      end

      first = timestamps[0]
      last = timestamps[-1]

      if first > start_at
        raise "Expected first oplog entry to be #{start_at.inspect} but was #{first.inspect}\n" +
          "The oplog is probably too small.\n" +
          "Increase the oplog size, then start with another full backup."
      elsif first < start_at
        raise "Expected first oplog entry to be #{start_at.inspect} but was #{first.inspect}\n" +
          "Something went wrong in our query."
      end

      result = {
        entries: timestamps.count,
        first: first,
        position: last
      }

      # Exactly one entry means only the overlap entry came back: no new data.
      if timestamps.count == 1
        result[:empty] = true
      else
        outfile = "oplog-#{first}-#{last}.bson"
        full_path = File.join(config.backup_dir, backup, outfile)
        FileUtils.mkdir_p File.join(config.backup_dir, backup)
        FileUtils.mv config.oplog_dump, full_path

        result[:file] = full_path
        result[:empty] = false
      end

      # Best-effort cleanup of the temporary dump folder.
      FileUtils.rm_r config.oplog_dump_folder rescue nil
      result
    end

    # Queries the server (via the bundled mongo shell script) for the newest
    # oplog timestamp. Returns a BSON::Timestamp, or nil for an empty oplog.
    # Raises StandardError when the shell output is not parseable JSON
    # (typically a connection/auth failure).
    def latest_oplog_timestamp
      script = File.expand_path('../../oplog-last-timestamp.js', File.dirname(__FILE__))
      result_text = config.mongo('local', script)
      begin
        response = JSON.parse(result_text)
        return nil unless response['position']
        BSON::Timestamp.from_json(response['position'])
      rescue JSON::ParserError => e
        raise StandardError, "Failed to connect to MongoDB: #{result_text}"
      end
    end

    # Performs a full mongodump into a new backup-<timestamp> folder.
    # Returns {position:, backup:} - the state needed for incremental backups.
    def backup_full
      position = latest_oplog_timestamp
      raise "Cannot backup with empty oplog" if position.nil?
      backup_name = "backup-#{position}"
      dump_folder = File.join(config.backup_dir, backup_name, 'dump')
      config.mongodump("--out #{dump_folder}")
      return {
        position: position,
        backup: backup_name
      }
    end

    # Entry point. mode is :auto (oplog if saved state exists, else full),
    # :full, or :oplog. State is persisted to config.state_file after each
    # successful step; a full backup is followed by an immediate oplog pass.
    def perform(mode=:auto)
      state_file = config.state_file
      # Missing/corrupt state file simply means "no previous backup".
      state = JSON.parse(File.read(state_file)) rescue nil
      state ||= {}
      have_position = (state['position'] && state['backup'])

      if mode == :auto
        if have_position
          mode = :oplog
        else
          mode = :full
        end
      end

      if mode == :oplog
        raise "Unknown backup position - cannot perform oplog backup." unless have_position
        MongoOplogBackup.log.info "Performing incremental oplog backup"
        position = BSON::Timestamp.from_json(state['position'])
        result = backup_oplog(start: position, backup: state['backup'])
        unless result[:empty]
          # The first entry overlaps the previous backup, so don't count it.
          new_entries = result[:entries] - 1
          state['position'] = result[:position]
          File.write(state_file, state.to_json)
          MongoOplogBackup.log.info "Backed up #{new_entries} new entries to #{result[:file]}"
        else
          MongoOplogBackup.log.info "Nothing new to backup"
        end
      elsif mode == :full
        MongoOplogBackup.log.info "Performing full backup"
        result = backup_full
        state = result
        File.write(state_file, state.to_json)
        MongoOplogBackup.log.info "Performed full backup"

        # Oplog backup
        perform(:oplog)
      end
    end

    def latest_oplog_timestamp_moped
      # Alternative implementation for `latest_oplog_timestamp`
      # (driver-based; requires the moped gem and a local mongod).
      require 'moped'
      session = Moped::Session.new([ "127.0.0.1:27017" ])
      session.use 'local'
      oplog = session['oplog.rs']
      entry = oplog.find.limit(1).sort('$natural' => -1).one
      if entry
        entry['ts']
      else
        nil
      end
    end

  end
end
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
module MongoOplogBackup
  # Holds backup options and wraps the external mongo/mongodump commands.
  class Config
    attr_reader :options

    # options - hash with :dir plus optional :host, :port, :ssl,
    # :username, :password.
    def initialize(options)
      @options = options
    end

    # Root directory where all backup state and dumps live.
    def backup_dir
      options[:dir]
    end

    # Connection flags shared by mongo and mongodump. Every present flag is
    # emitted with a trailing space, in host/port/ssl/username/password order.
    def command_line_options
      flags = []
      flags << "--host #{options[:host].strip}" if options[:host]
      flags << "--port #{options[:port].strip}" if options[:port]
      flags << '--ssl' if options[:ssl]
      flags << "--username #{options[:username].strip}" if options[:username]
      flags << "--password #{options[:password].strip}" if options[:password]
      flags.map { |flag| "#{flag} " }.join
    end

    # Temporary folder mongodump writes into during an oplog backup.
    def oplog_dump_folder
      File.join(backup_dir, 'dump')
    end

    # Path of the raw oplog collection dump produced by mongodump.
    def oplog_dump
      File.join(oplog_dump_folder, 'local/oplog.rs.bson')
    end

    # JSON file tracking the last backed-up oplog position and folder.
    def state_file
      File.join(backup_dir, 'backup.json')
    end

    # Runs a shell command (logged at debug level) and returns its stdout.
    def exec(cmd)
      MongoOplogBackup.log.debug ">>> #{cmd}"
      `#{cmd}`
    end

    # Runs mongodump with the shared connection flags; logs its output.
    def mongodump(args)
      output = exec("mongodump #{command_line_options} #{args}")
      MongoOplogBackup.log.info output
    end

    # Runs a mongo shell script against +db+ and returns its stdout.
    def mongo(db, script)
      exec("mongo #{command_line_options} --quiet --norc #{db} #{script}")
    end
  end
end
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
# Core extension: order-checking predicates for any Enumerable.
module Enumerable
  # True when the elements are in non-decreasing order (ties allowed).
  def sorted?
    each_cons(2).all? { |prev, curr| (prev <=> curr) <= 0 }
  end

  # True when the elements are strictly increasing - i.e. sorted and unique.
  def increasing?
    each_cons(2).all? { |prev, curr| (prev <=> curr) < 0 }
  end
end
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
# Make BSON::Timestamp comparable
require 'bson'

module MongoOplogBackup::Ext
  # Mixin for BSON::Timestamp adding ordering, a compact string form, and
  # hash/eql? so timestamps can be sorted, compared, de-duplicated and used
  # as Hash keys.
  module Timestamp
    # Order by seconds first, then by increment (lexicographic array compare).
    def <=> other
      [seconds, increment] <=> [other.seconds, other.increment]
    end

    # "seconds:increment" - the format used in oplog dump filenames.
    def to_s
      "#{seconds}:#{increment}"
    end

    # Hash on the string form so equal timestamps hash identically.
    def hash
      to_s.hash
    end

    # Keep eql? consistent with == (== is provided by Comparable via <=>).
    def eql? other
      self == other
    end

    module ClassMethods
      # Accepts {'t' => seconds, 'i' => increment}
      def from_json(data)
        self.new(data['t'], data['i'])
      end
    end

  end
end

# __send__ is used because Module#include was private before Ruby 2.1.
::BSON::Timestamp.__send__(:include, Comparable)
::BSON::Timestamp.__send__(:include, MongoOplogBackup::Ext::Timestamp)
::BSON::Timestamp.extend(MongoOplogBackup::Ext::Timestamp::ClassMethods)
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
require 'fileutils' # merge_backup uses FileUtils; don't depend on load order

module MongoOplogBackup
  # Utilities for reading, merging and locating BSON oplog dump files.
  module Oplog
    # Yields each BSON document in +filename+, in file order.
    def self.each_document(filename)
      File.open(filename, 'rb') do |stream|
        while !stream.eof?
          yield BSON::Document.from_bson(stream)
        end
      end
    end

    # Returns the 'ts' timestamp of every entry in the dump, in file order.
    def self.oplog_timestamps(filename)
      timestamps = []
      each_document(filename) do |doc|
        # This can be optimized by only decoding the timestamp
        # (first field), instead of decoding the entire document.
        timestamps << doc['ts']
      end
      timestamps
    end

    # Matches ".../oplog-<sec>:<inc>-<sec>:<inc>.bson" at the end of a path.
    # Note: requires a leading '/', so a bare filename will not match.
    FILENAME_RE = /\/oplog-(\d+):(\d+)-(\d+):(\d+)\.bson\z/

    # Parses the first/last timestamps encoded in an oplog dump filename.
    # Returns {first:, last:} or nil when the name doesn't match FILENAME_RE.
    def self.timestamps_from_filename filename
      match = FILENAME_RE.match(filename)
      return nil unless match
      s1 = match[1].to_i
      i1 = match[2].to_i
      s2 = match[3].to_i
      i2 = match[4].to_i
      first = BSON::Timestamp.new(s1, i1)
      last = BSON::Timestamp.new(s2, i2)
      {
        first: first,
        last: last
      }
    end

    # Merges consecutive oplog dump files into a single +target+ file,
    # deduplicating the one-entry overlap between consecutive segments.
    # Raises when segments are unordered, don't match the timestamps in
    # their filename, or overlap by other than exactly 1 entry (unless
    # options[:force]).
    def self.merge(target, source_files, options={})
      limit = options[:limit] # TODO: use
      force = options[:force]

      File.open(target, 'wb') do |output|
        last_timestamp = nil
        first = true

        source_files.each do |filename|
          timestamps = timestamps_from_filename(filename)
          if timestamps
            expected_first = timestamps[:first]
            expected_last = timestamps[:last]
          else
            expected_first = nil
            expected_last = nil
          end

          # Optimize:
          # We can assume that the timestamps are in order.
          # This means we only need to find the first non-overlapping point,
          # and the rest we can pass through directly.
          # NOTE: interpolation restored below - the published diff had
          # scrubbed "#{filename}" out of these messages.
          MongoOplogBackup.log.debug "Reading #{filename}"
          last_file_timestamp = nil
          skipped = 0
          wrote = 0
          first_file_timestamp = nil
          Oplog.each_document(filename) do |doc|
            timestamp = doc['ts']
            first_file_timestamp = timestamp if first_file_timestamp.nil?
            if !last_timestamp.nil? && timestamp <= last_timestamp
              # Overlap with the previous segment - skip the duplicate.
              skipped += 1
            elsif !last_file_timestamp.nil? && timestamp <= last_file_timestamp
              raise "Timestamps out of order in #{filename}"
            else
              output.write(doc.to_bson)
              wrote += 1
              last_timestamp = timestamp
            end
            last_file_timestamp = timestamp
          end

          if expected_first && first_file_timestamp != expected_first
            raise "#{expected_first} was not the first timestamp in #{filename}"
          end

          if expected_last && last_file_timestamp != expected_last
            raise "#{expected_last} was not the last timestamp in #{filename}"
          end

          MongoOplogBackup.log.info "Wrote #{wrote} and skipped #{skipped} oplog entries from #{filename}"
          # Every segment after the first must overlap the previous one by
          # exactly one entry, otherwise there is a gap or extra duplication.
          raise "Overlap must be exactly 1" unless first || skipped == 1 || force
          first = false
        end
      end
    end

    # Returns all oplog segment files in +dir+, sorted by start timestamp.
    def self.find_oplogs(dir)
      files = Dir.glob(File.join(dir, 'oplog-*.bson'))
      files.keep_if {|name| name =~ FILENAME_RE}
      files.sort! {|a, b| timestamps_from_filename(a)[:first] <=> timestamps_from_filename(b)[:first]}
      files
    end

    # Merges all oplog segments in +dir+ into <dir>/dump/oplog.bson,
    # ready for `mongorestore --oplogReplay`.
    def self.merge_backup(dir)
      oplogs = find_oplogs(dir)
      target = File.join(dir, 'dump', 'oplog.bson')
      FileUtils.mkdir_p(File.join(dir, 'dump'))
      merge(target, oplogs)
    end

  end
end
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
# coding: utf-8
# Gem specification for mongo-oplog-backup.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'mongo_oplog_backup/version'

Gem::Specification.new do |spec|
  spec.name = "mongo-oplog-backup"
  spec.version = MongoOplogBackup::VERSION
  spec.authors = ["Ralf Kistner"]
  spec.email = ["ralf@journeyapps.com"]
  spec.summary = %q{Incremental backups for MongoDB using the oplog.}
  spec.description = %q{Periodically backup new sections of the oplog for incremental backups.}
  spec.homepage = ""
  spec.license = "MIT"

  # Package everything tracked by git; bin/* become executables.
  spec.files = `git ls-files -z`.split("\x0")
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  # Runtime: bson for oplog decoding, slop for the CLI.
  spec.add_dependency "bson", "~> 2.3"
  spec.add_dependency "slop", "~> 3.6"

  spec.add_development_dependency "bundler", "~> 1.5"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec", "~> 3.0"
  spec.add_development_dependency "moped", "~> 2.0"
end
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
// Get the timestamp of the last oplog entry.
// Usage: mongo --quiet --norc local oplog-last-timestamp.js
// Prints {"position": <ts>} as JSON on stdout, or {} when the oplog is empty;
// the Ruby side (Backup#latest_oplog_timestamp) parses this output.

var local = db.getSiblingDB('local');
// Newest entry first: sort by insertion order ($natural) descending.
var last = local['oplog.rs'].find().sort({'$natural': -1}).limit(1)[0];
var result = {};
if(last != null) {
  result = {position: last['ts']};
}

print(JSON.stringify(result));
|
data/spec/backup_spec.rb
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
require 'spec_helper'
require 'moped'

# Integration specs: require a local mongod replica set on 127.0.0.1:27017
# (see .travis.yml). SESSION is defined in spec_helper.
describe MongoOplogBackup do
  it 'should have a version number' do
    MongoOplogBackup::VERSION.should_not be_nil
  end

  let(:backup) { MongoOplogBackup::Backup.new(MongoOplogBackup::Config.new dir: 'spec-tmp/backup') }

  before(:all) do
    # We need one entry in the oplog to start with
    SESSION.with(safe: true) do |session|
      session['test'].insert({a: 1})
    end
  end

  it 'should get the latest oplog entry' do
    # Shell-script implementation must agree with the moped driver one.
    ts1 = backup.latest_oplog_timestamp
    ts2 = backup.latest_oplog_timestamp_moped

    ts1.should == ts2
  end

  it 'should error on latest oplog entry with invalid port' do
    b2 = MongoOplogBackup::Backup.new(MongoOplogBackup::Config.new({
      dir: 'spec-tmp/backup', port: '12345'}))
    -> { b2.latest_oplog_timestamp }.should raise_error
  end

  it 'should error on latest oplog entry with invalid password' do
    b2 = MongoOplogBackup::Backup.new(MongoOplogBackup::Config.new({
      dir: 'spec-tmp/backup', username: 'foo', password: '123'}))
    -> { b2.latest_oplog_timestamp }.should raise_error
  end


  it "should perform an oplog backup" do
    first = backup.latest_oplog_timestamp
    first.should_not be_nil
    SESSION.with(safe: true) do |session|
      5.times do
        session['test'].insert({a: 1})
      end
    end
    last = backup.latest_oplog_timestamp
    result = backup.backup_oplog(start: first, backup: 'backup1')
    file = result[:file]
    timestamps = MongoOplogBackup::Oplog.oplog_timestamps(file)
    # 5 inserts plus the overlapping starting entry = 6.
    timestamps.count.should == 6
    timestamps.first.should == first
    timestamps.last.should == last

  end
end
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
require 'spec_helper'

# Specs for the Enumerable#sorted? / #increasing? core extensions.
describe Enumerable do
  it 'should define sorted?' do
    [1, 2, 3, 4, 6].sorted?.should == true
    [1, 2, 3, 6, 4].sorted?.should == false
    [1, 2, 3, 4, 4].sorted?.should == true
    [6, 4, 3, 2, 1].sorted?.should == false
    [1].sorted?.should == true
    [].sorted?.should == true
  end

  it 'should define increasing?' do
    [1, 2, 3, 4, 6].increasing?.should == true
    [1, 2, 3, 6, 4].increasing?.should == false
    # Duplicates are sorted but not strictly increasing.
    [1, 2, 3, 4, 4].increasing?.should == false
    [6, 4, 3, 2, 1].increasing?.should == false
    [1].increasing?.should == true
    [].increasing?.should == true
  end
end
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
data/spec/oplog_spec.rb
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
require 'spec_helper'
require 'fileutils'

# Specs for Oplog using the pre-captured BSON fixtures in spec/fixtures.
# Consecutive fixture segments overlap by exactly one entry, as produced
# by Backup#backup_oplog.
describe MongoOplogBackup::Oplog do
  let(:oplog1) { 'spec/fixtures/oplog-1408088734:1-1408088740:1.bson'}
  let(:oplog2) { 'spec/fixtures/oplog-1408088740:1-1408088810:1.bson'}
  let(:oplog3) { 'spec/fixtures/oplog-1408088810:1-1408088928:1.bson'}
  let(:oplog_merged) { 'spec/fixtures/oplog-merged.bson'}

  it 'should extract oplog timestamps' do
    timestamps = MongoOplogBackup::Oplog.oplog_timestamps(oplog1)
    timestamps.should == [
      BSON::Timestamp.new(1408088734, 1),
      BSON::Timestamp.new(1408088738, 1),
      BSON::Timestamp.new(1408088739, 1),
      BSON::Timestamp.new(1408088740, 1)
    ]
  end

  it 'should merge oplogs' do
    merged_out = 'spec-tmp/oplog-merged.bson'
    MongoOplogBackup::Oplog.merge(merged_out, [oplog1, oplog2, oplog3])

    # The merged file should contain the union of the segments' entries,
    # with the overlap entries deduplicated.
    expected_timestamps =
      MongoOplogBackup::Oplog.oplog_timestamps(oplog1) +
      MongoOplogBackup::Oplog.oplog_timestamps(oplog2) +
      MongoOplogBackup::Oplog.oplog_timestamps(oplog3)

    expected_timestamps.uniq!
    expected_timestamps.sort! # Not sure if uniq! modifies the order

    actual_timestamps = MongoOplogBackup::Oplog.oplog_timestamps(merged_out)
    actual_timestamps.should == expected_timestamps

    merged_out.should be_same_oplog_as oplog_merged
  end

  it 'should parse timestamps from a filename' do
    timestamps = MongoOplogBackup::Oplog.timestamps_from_filename('some/oplog-1408088734:1-1408088740:52.bson')
    timestamps.should == {
      first: BSON::Timestamp.new(1408088734, 1),
      last: BSON::Timestamp.new(1408088740, 52)
    }
  end
  it 'should sort oplogs in a folder' do
    oplogs = MongoOplogBackup::Oplog.find_oplogs('spec/fixtures')
    oplogs.should == [oplog1, oplog2, oplog3]
  end

  it "should merge a backup folder" do
    FileUtils.mkdir_p 'spec-tmp/backup'
    FileUtils.cp_r Dir['spec/fixtures/oplog-*.bson'], 'spec-tmp/backup/'

    MongoOplogBackup::Oplog.merge_backup('spec-tmp/backup')

    'spec-tmp/backup/dump/oplog.bson'.should be_same_oplog_as oplog_merged
  end
end
|
data/spec/spec_helper.rb
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
# Spec bootstrap: loads the gem from ./lib, redirects logging to test.log,
# defines file-comparison matchers, and opens a moped session (requires a
# local mongod on 127.0.0.1:27017).
$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'mongo_oplog_backup'
require 'fileutils'

FileUtils.rm_rf 'test.log'
MongoOplogBackup.log = Logger.new('test.log')

#https://gist.github.com/mattwynne/736421
# NOTE(review): "exected_file_path" is a typo for "expected_file_path"
# (local names only - behavior is unaffected).
RSpec::Matchers.define(:be_same_file_as) do |exected_file_path|
  match do |actual_file_path|
    md5_hash(actual_file_path).should == md5_hash(exected_file_path)
  end

  # MD5 of the full file contents (fixtures are small).
  def md5_hash(file_path)
    Digest::MD5.hexdigest(File.read(file_path))
  end
end

# Like be_same_file_as, but reports the differing oplog timestamps on failure.
RSpec::Matchers.define(:be_same_oplog_as) do |exected_file_path|
  match do |actual_file_path|
    timestamps(actual_file_path).should == timestamps(exected_file_path)
    actual_file_path.should be_same_file_as exected_file_path
  end

  failure_message do |actual_file_path|
    ets = timestamps(exected_file_path).join("\n")
    ats = timestamps(actual_file_path).join("\n")
    "expected that #{actual_file_path} would be the same as #{exected_file_path}\n" +
    "Expected timestamps:\n#{ets}\n" +
    "Actual timestamps:\n#{ats}"
  end

  def timestamps(file_path)
    MongoOplogBackup::Oplog.oplog_timestamps(file_path)
  end
end

RSpec.configure do |config|
  # The specs use the classic should syntax throughout.
  config.expect_with :rspec do |c|
    c.syntax = :should
  end

  # Fresh scratch directory around every example.
  config.before(:each) do
    FileUtils.mkdir_p 'spec-tmp'
  end

  config.after(:each) do
    FileUtils.rm_rf 'spec-tmp'
  end
end

require 'moped'
SESSION = Moped::Session.new([ "127.0.0.1:27017" ])
SESSION.use 'backup-test'
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
require 'spec_helper'

# Specs for the BSON::Timestamp extensions (comparison, hashing, JSON, to_s).
describe MongoOplogBackup::Ext::Timestamp do
  it 'should be comparable' do
    # Ordering: seconds first, then increment.
    a = BSON::Timestamp.new(1408004593, 1)
    b = BSON::Timestamp.new(1408004593, 2)
    c = BSON::Timestamp.new(1408004594, 1)
    (a <=> a).should == 0
    (b <=> b).should == 0
    (a <=> b).should == -1
    (b <=> a).should == 1
    (b <=> c).should == -1
    (a <=> c).should == -1
    (c <=> a).should == 1
  end

  it "should have a consistent hash and eql?" do
    a1 = BSON::Timestamp.new(1408004593, 1)
    a2 = BSON::Timestamp.new(1408004593, 1)
    b = BSON::Timestamp.new(1408004593, 2)
    c = BSON::Timestamp.new(1408004594, 1)

    a1.hash.should == a2.hash
    a1.hash.should_not == b.hash
    a1.hash.should_not == c.hash

    a1.eql?(a2).should == true
    a1.eql?(b).should == false
    a1.eql?(c).should == false

    (a1 == a2).should == true
    (a1 == b).should == false
    (a1 == c).should == false
  end

  it 'should define from_json' do
    # from_json/as_json round-trip through {'t' => ..., 'i' => ...}.
    json = {"t" => 1408004593, "i" => 20}
    ts = BSON::Timestamp.from_json(json)
    ts.seconds.should == 1408004593
    ts.increment.should == 20
    ts.as_json.should == json
  end

  it 'should define to_s' do
    ts = BSON::Timestamp.new(1408004593, 2)
    ts.to_s.should == '1408004593:2'
    "#{ts}".should == '1408004593:2'
  end
end
|
metadata
ADDED
|
@@ -0,0 +1,165 @@
|
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
|
2
|
+
name: mongo-oplog-backup
|
|
3
|
+
version: !ruby/object:Gem::Version
|
|
4
|
+
version: 0.0.1
|
|
5
|
+
platform: ruby
|
|
6
|
+
authors:
|
|
7
|
+
- Ralf Kistner
|
|
8
|
+
autorequire:
|
|
9
|
+
bindir: bin
|
|
10
|
+
cert_chain: []
|
|
11
|
+
date: 2014-08-15 00:00:00.000000000 Z
|
|
12
|
+
dependencies:
|
|
13
|
+
- !ruby/object:Gem::Dependency
|
|
14
|
+
name: bson
|
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
|
16
|
+
requirements:
|
|
17
|
+
- - "~>"
|
|
18
|
+
- !ruby/object:Gem::Version
|
|
19
|
+
version: '2.3'
|
|
20
|
+
type: :runtime
|
|
21
|
+
prerelease: false
|
|
22
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
23
|
+
requirements:
|
|
24
|
+
- - "~>"
|
|
25
|
+
- !ruby/object:Gem::Version
|
|
26
|
+
version: '2.3'
|
|
27
|
+
- !ruby/object:Gem::Dependency
|
|
28
|
+
name: slop
|
|
29
|
+
requirement: !ruby/object:Gem::Requirement
|
|
30
|
+
requirements:
|
|
31
|
+
- - "~>"
|
|
32
|
+
- !ruby/object:Gem::Version
|
|
33
|
+
version: '3.6'
|
|
34
|
+
type: :runtime
|
|
35
|
+
prerelease: false
|
|
36
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
37
|
+
requirements:
|
|
38
|
+
- - "~>"
|
|
39
|
+
- !ruby/object:Gem::Version
|
|
40
|
+
version: '3.6'
|
|
41
|
+
- !ruby/object:Gem::Dependency
|
|
42
|
+
name: bundler
|
|
43
|
+
requirement: !ruby/object:Gem::Requirement
|
|
44
|
+
requirements:
|
|
45
|
+
- - "~>"
|
|
46
|
+
- !ruby/object:Gem::Version
|
|
47
|
+
version: '1.5'
|
|
48
|
+
type: :development
|
|
49
|
+
prerelease: false
|
|
50
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
51
|
+
requirements:
|
|
52
|
+
- - "~>"
|
|
53
|
+
- !ruby/object:Gem::Version
|
|
54
|
+
version: '1.5'
|
|
55
|
+
- !ruby/object:Gem::Dependency
|
|
56
|
+
name: rake
|
|
57
|
+
requirement: !ruby/object:Gem::Requirement
|
|
58
|
+
requirements:
|
|
59
|
+
- - ">="
|
|
60
|
+
- !ruby/object:Gem::Version
|
|
61
|
+
version: '0'
|
|
62
|
+
type: :development
|
|
63
|
+
prerelease: false
|
|
64
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
65
|
+
requirements:
|
|
66
|
+
- - ">="
|
|
67
|
+
- !ruby/object:Gem::Version
|
|
68
|
+
version: '0'
|
|
69
|
+
- !ruby/object:Gem::Dependency
|
|
70
|
+
name: rspec
|
|
71
|
+
requirement: !ruby/object:Gem::Requirement
|
|
72
|
+
requirements:
|
|
73
|
+
- - "~>"
|
|
74
|
+
- !ruby/object:Gem::Version
|
|
75
|
+
version: '3.0'
|
|
76
|
+
type: :development
|
|
77
|
+
prerelease: false
|
|
78
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
79
|
+
requirements:
|
|
80
|
+
- - "~>"
|
|
81
|
+
- !ruby/object:Gem::Version
|
|
82
|
+
version: '3.0'
|
|
83
|
+
- !ruby/object:Gem::Dependency
|
|
84
|
+
name: moped
|
|
85
|
+
requirement: !ruby/object:Gem::Requirement
|
|
86
|
+
requirements:
|
|
87
|
+
- - "~>"
|
|
88
|
+
- !ruby/object:Gem::Version
|
|
89
|
+
version: '2.0'
|
|
90
|
+
type: :development
|
|
91
|
+
prerelease: false
|
|
92
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
93
|
+
requirements:
|
|
94
|
+
- - "~>"
|
|
95
|
+
- !ruby/object:Gem::Version
|
|
96
|
+
version: '2.0'
|
|
97
|
+
description: Periodically backup new sections of the oplog for incremental backups.
|
|
98
|
+
email:
|
|
99
|
+
- ralf@journeyapps.com
|
|
100
|
+
executables:
|
|
101
|
+
- mongo-oplog-backup
|
|
102
|
+
extensions: []
|
|
103
|
+
extra_rdoc_files: []
|
|
104
|
+
files:
|
|
105
|
+
- ".gitignore"
|
|
106
|
+
- ".rspec"
|
|
107
|
+
- ".ruby-version"
|
|
108
|
+
- ".travis.yml"
|
|
109
|
+
- Gemfile
|
|
110
|
+
- LICENSE.txt
|
|
111
|
+
- README.md
|
|
112
|
+
- Rakefile
|
|
113
|
+
- bin/mongo-oplog-backup
|
|
114
|
+
- lib/mongo_oplog_backup.rb
|
|
115
|
+
- lib/mongo_oplog_backup/backup.rb
|
|
116
|
+
- lib/mongo_oplog_backup/config.rb
|
|
117
|
+
- lib/mongo_oplog_backup/ext/enumerable.rb
|
|
118
|
+
- lib/mongo_oplog_backup/ext/timestamp.rb
|
|
119
|
+
- lib/mongo_oplog_backup/oplog.rb
|
|
120
|
+
- lib/mongo_oplog_backup/version.rb
|
|
121
|
+
- mongo-oplog-backup.gemspec
|
|
122
|
+
- oplog-last-timestamp.js
|
|
123
|
+
- spec/backup_spec.rb
|
|
124
|
+
- spec/enumerable_spec.rb
|
|
125
|
+
- spec/fixtures/oplog-1408088734:1-1408088740:1.bson
|
|
126
|
+
- spec/fixtures/oplog-1408088740:1-1408088810:1.bson
|
|
127
|
+
- spec/fixtures/oplog-1408088810:1-1408088928:1.bson
|
|
128
|
+
- spec/fixtures/oplog-merged.bson
|
|
129
|
+
- spec/oplog_spec.rb
|
|
130
|
+
- spec/spec_helper.rb
|
|
131
|
+
- spec/timestamp_spec.rb
|
|
132
|
+
homepage: ''
|
|
133
|
+
licenses:
|
|
134
|
+
- MIT
|
|
135
|
+
metadata: {}
|
|
136
|
+
post_install_message:
|
|
137
|
+
rdoc_options: []
|
|
138
|
+
require_paths:
|
|
139
|
+
- lib
|
|
140
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
|
141
|
+
requirements:
|
|
142
|
+
- - ">="
|
|
143
|
+
- !ruby/object:Gem::Version
|
|
144
|
+
version: '0'
|
|
145
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
|
146
|
+
requirements:
|
|
147
|
+
- - ">="
|
|
148
|
+
- !ruby/object:Gem::Version
|
|
149
|
+
version: '0'
|
|
150
|
+
requirements: []
|
|
151
|
+
rubyforge_project:
|
|
152
|
+
rubygems_version: 2.2.2
|
|
153
|
+
signing_key:
|
|
154
|
+
specification_version: 4
|
|
155
|
+
summary: Incremental backups for MongoDB using the oplog.
|
|
156
|
+
test_files:
|
|
157
|
+
- spec/backup_spec.rb
|
|
158
|
+
- spec/enumerable_spec.rb
|
|
159
|
+
- spec/fixtures/oplog-1408088734:1-1408088740:1.bson
|
|
160
|
+
- spec/fixtures/oplog-1408088740:1-1408088810:1.bson
|
|
161
|
+
- spec/fixtures/oplog-1408088810:1-1408088928:1.bson
|
|
162
|
+
- spec/fixtures/oplog-merged.bson
|
|
163
|
+
- spec/oplog_spec.rb
|
|
164
|
+
- spec/spec_helper.rb
|
|
165
|
+
- spec/timestamp_spec.rb
|