elasticsearch_s3_backup 1.0.1
- checksums.yaml +7 -0
- data/.gitignore +9 -0
- data/.rspec +2 -0
- data/.travis.yml +4 -0
- data/Gemfile +4 -0
- data/LICENSE.txt +21 -0
- data/README.md +48 -0
- data/Rakefile +6 -0
- data/bin/console +14 -0
- data/bin/setup +7 -0
- data/elasticsearch_s3_backup.gemspec +40 -0
- data/exe/es_s3_backup +4 -0
- data/lib/elasticsearch_s3_backup.rb +316 -0
- data/lib/elasticsearch_s3_backup/version.rb +5 -0
- metadata +172 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA1:
  metadata.gz: 1a2652e8865ae1bae268503eacb635d2ce586592
  data.tar.gz: 11c55edfc00bdc8abb85aff511c939fe0eb4d9e6
SHA512:
  metadata.gz: e7e337aa678f99dba4ff872761039f15fa511c6d7246d955f562d52a46889e83bd0789ae15ef841419c6f2f8f424c0a757b1c829c0992683050fdf077a85f8fb
  data.tar.gz: c27135eab4c5871cee1d48b0ed6f12f516040bf32049d6ac29dd017db3816223f14d1c91d4286052d03b68abfca4fc04ebe4fb259ad27737441396c024975234
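These digests cover the two components of the packaged .gem archive (metadata.gz and data.tar.gz). As a rough illustration only (not part of the gem), the SHA-512 entries could be re-checked locally with Ruby's Digest, assuming those two files have been extracted from a downloaded copy of the .gem next to checksums.yaml:

# Illustrative check only -- not shipped with the gem. Assumes metadata.gz
# and data.tar.gz have been unpacked from the downloaded .gem archive.
require 'digest'
require 'yaml'

expected = YAML.load_file('checksums.yaml')

%w(metadata.gz data.tar.gz).each do |part|
  actual = Digest::SHA512.file(part).hexdigest
  puts "#{part}: #{actual == expected['SHA512'][part] ? 'OK' : 'MISMATCH'}"
end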
data/.gitignore
ADDED
data/.rspec
ADDED
data/.travis.yml
ADDED
data/Gemfile
ADDED
data/LICENSE.txt
ADDED
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2015 Eric Herot

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,48 @@
# ElasticsearchS3Backup

This gem creates a backup of your Elasticsearch cluster and uploads it to S3.

## Installation

Install it using:

    $ gem install elasticsearch_s3_backup

## Configuration

Create a YAML file called `/etc/s3_backup.yml` and give it the following contents:

    ---
    notification_email: @@NOTIFICATION_EMAIL_ADDRESS@@
    test_size: 100
    log: "/var/log/s3_backup/s3_backup.log"
    new_repo_params:
      bucket: @@BACKUPS_S3_BUCKET@@
      max_snapshot_bytes_per_sec: 100mb
      max_restore_bytes_per_sec: 500mb
    env: stage
    pagerduty_api_key: @@YOUR_PAGERDUTY_API_KEY@@
    node_name: @@THIS_NODE'S_NAME@@
    elasticsearch_auth_file: "/usr/local/elasticsearch/password"
    cluster_name: @@YOUR_CLUSTER_NAME@@

## Usage

Just run the command:

    $ es_s3_backup

## Development

After checking out the repo, run `bin/setup` to install dependencies. Then run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.

To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb` and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).

## Contributing

Bug reports and pull requests are welcome on GitHub at https://github.com/evertrue/elasticsearch_s3_backup.

## License

The gem is available as open source under the terms of the [MIT License](http://opensource.org/licenses/MIT).

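For reference, the library shown later in this diff loads this file directly with YAML.load_file('/etc/s3_backup.yml'), so the keys above map one-to-one onto what a backup run reads. A minimal sketch (not part of the gem; output is placeholder only):

# Sketch: the keys the backup run reads from /etc/s3_backup.yml,
# per lib/elasticsearch_s3_backup.rb below.
require 'yaml'

conf = YAML.load_file('/etc/s3_backup.yml')

puts conf['test_size']                  # number of dummy docs written to the backup_test index
puts conf['new_repo_params']['bucket']  # S3 bucket backing the monthly snapshot repository
puts conf['node_name']                  # only the currently elected master node runs the backup
puts conf['elasticsearch_auth_file']    # file containing "user:password" for Elasticsearch requests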
data/Rakefile
ADDED
data/bin/console
ADDED
@@ -0,0 +1,14 @@
#!/usr/bin/env ruby

require "bundler/setup"
require "elasticsearch_s3_backup"

# You can add fixtures and/or initialization code here to make experimenting
# with your gem easier. You can also use a different console, if you like.

# (If you use this, don't forget to add pry to your Gemfile!)
# require "pry"
# Pry.start

require "irb"
IRB.start
data/bin/setup
ADDED
data/elasticsearch_s3_backup.gemspec
ADDED
@@ -0,0 +1,40 @@
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'elasticsearch_s3_backup/version'

Gem::Specification.new do |spec|
  spec.name          = 'elasticsearch_s3_backup'
  spec.version       = EverTools::ElasticsearchS3Backup::VERSION
  spec.authors       = ['Eric Herot']
  spec.email         = ['eric.github@herot.com']

  spec.summary       = 'Backs up ElasticSearch to S3'
  spec.description   = spec.description
  spec.homepage      = 'https://nerds.evertrue.com'
  spec.license       = 'MIT'

  # Prevent pushing this gem to RubyGems.org by setting 'allowed_push_host', or
  # delete this section to allow pushing this gem to any host.
  if spec.respond_to?(:metadata)
    spec.metadata['allowed_push_host'] = 'https://rubygems.org'
  else
    fail 'RubyGems 2.0 or newer is required to protect against public gem ' \
         'pushes.'
  end

  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = 'exe'
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']

  spec.add_development_dependency 'bundler', '~> 1.10'
  spec.add_development_dependency 'rake', '~> 10.0'
  spec.add_development_dependency 'rspec'

  spec.add_dependency 'activesupport'
  spec.add_dependency 'unirest'
  spec.add_dependency 'faker'
  spec.add_dependency 'pagerduty'
  spec.add_dependency 'sentry-raven'
end
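The gemspec reads its version from EverTools::ElasticsearchS3Backup::VERSION. The 5-line lib/elasticsearch_s3_backup/version.rb is not expanded in this listing; an assumed sketch of its contents, consistent with the constant referenced above and the 1.0.1 release:

# Assumed contents of lib/elasticsearch_s3_backup/version.rb (collapsed in this diff).
module EverTools
  class ElasticsearchS3Backup
    VERSION = '1.0.1'
  end
end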
data/exe/es_s3_backup
ADDED
data/lib/elasticsearch_s3_backup.rb
ADDED
@@ -0,0 +1,316 @@
require 'elasticsearch_s3_backup/version'
require 'active_support/time'
require 'unirest'
require 'logger'
require 'faker'
require 'pagerduty'
require 'yaml'
require 'sentry-raven'

module EverTools
  class ElasticsearchS3Backup
    def conf
      @conf ||= YAML.load_file('/etc/s3_backup.yml')
    end

    def pagerduty
      @pagerduty ||= Pagerduty.new conf['pagerduty_api_key']
    end

    def configure_sentry!
      Raven.configure { |config| config.dsn = conf['sentry_dsn'] }
    end

    def auth
      @auth ||= File.read(conf['elasticsearch_auth_file']).strip.split ':'
    end

    def initialize
      Unirest.default_header 'Accept', 'application/json'
      Unirest.default_header 'Content-Type', 'application/json'
      Unirest.timeout 30

      @url = 'http://localhost:9200'

      @backup_index = 'backup_test'
      @restore_index = 'restore_test'

      now = Time.new.utc
      @monthly = now.strftime '%m-%Y'
      @datetime = now.strftime '%m-%d_%H%M'

      @monthly_snap_url = "#{@url}/_snapshot/#{@monthly}"

      @backup_timeout = 1.hours
    end

    def logger
      # Set up logging
      @logger ||= Logger.new(conf['log']).tap do |l|
        l.level = Logger::INFO
        l.progname = 's3_backup'
        l.formatter =
          proc do |severity, datetime, progname, msg|
            "#{datetime.utc} [#{progname}] #{severity}: #{msg}\n"
          end
      end
    end

    # rubocop:disable Metrics/AbcSize
    def es_api(method, uri, params = {})
      tries = 3
      begin
        r = Unirest.send(method, uri, params)
        case r.code
        when 200..299
          return r
        when 400..499
          logger.debug "#{method} request to #{uri} received #{r.code} (params: #{params.inspect})\n" \
                       "Body:\n" \
                       "#{r.body}\n"
          return r
        end
        fail "#{method.upcase} request to #{uri} failed (params: #{params.inspect})\n" \
             "Response code: #{r.code}\n" \
             "Body:\n" \
             "#{r.body}\n"
      rescue RuntimeError => e
        tries -= 1
        retry if e.message == 'Request Timeout' && tries > 0
        raise e
      end
    end
    # rubocop:enable Metrics/AbcSize

    def master?
      es_api(:get, "#{@url}/_cat/master").body[0]['node'] == conf['node_name']
    end

    # Check if an index exists
    def index?(uri)
      es_api(:get, "#{@url}/#{uri}").code == 200
    end

    # Check if a backup repo exists
    def repo?
      es_api(:get, @monthly_snap_url).code == 200
    end

    def notify(e)
      pagerduty.trigger(
        'prod Elasticsearch S3 failed',
        client: conf['node_name'],
        details: "#{e.message}\n\n#{e.backtrace}"
      )
    end

    def insert_test_data
      # Generate some test data using Faker
      #
      # Uses Bitcoin addresses for their random, hash-like nature
      # Creates the `backup_test` index if necessary
      # Updates the set of `dummy` documents in the `backup_test` index on every run

      logger.info 'Generating test data using Faker…'
      conf['test_size'].times do |i|
        es_api(
          :put,
          "#{@url}/#{@backup_index}/dummy/#{i}",
          parameters: {
            test_value: Faker::Bitcoin.address
          }.to_json
        )
      end
    end

    def delete_index(index)
      logger.info "Deleting index: #{index}"
      es_api(
        :delete,
        "#{@url}/#{index}",
        auth: { user: auth.first, password: auth.last }
      )
    end

    def create_repo
      new_repo_params = conf['new_repo_params'].merge(
        type: 's3',
        settings: {
          base_path: "/elasticsearch/#{conf['cluster_name']}/#{conf['env']}/#{@monthly}",
          server_side_encryption: true
        }
      )

      logger.info 'Creating a new monthly ES backup repo…'
      es_api(
        :put,
        @monthly_snap_url,
        parameters: new_repo_params.to_json
      )
    end

    def valid_date?(date)
      Time.strptime(date, '%m-%Y') rescue false
    end

    def dated_repos
      es_api(:get, "#{@url}/_snapshot").body.keys.select { |r| valid_date? r }
    end

    def remove_expired_backups
      # Remove repos more than 3 months old
      logger.info "Removing backups older than #{3.months.ago.strftime '%m-%Y'}"
      dated_repos.select { |b| Time.strptime(b, '%m-%Y') < 3.months.ago }.each do |repo|
        logger.info "Removing #{repo}"
        es_api :delete, "#{@url}/_snapshot/#{repo}"
      end
    end

    # rubocop:disable Metrics/AbcSize
    def snapshot
      backup_uri = "#{@monthly_snap_url}/#{@datetime}"
      status_req = es_api :get, "#{backup_uri}/_status"
      if status_req.code == 404 ||
         status_req.body['snapshots'].empty?
        fail "Could not find the backup I just created (#{backup_uri})"
      end
      snapshot = status_req.body['snapshots'].first
      logger.info "Backup state: #{snapshot['state']} " \
                  "(finished shard #{snapshot['shards_stats']['done']} of " \
                  "#{snapshot['shards_stats']['total']})"
      snapshot
    end
    # rubocop:enable Metrics/AbcSize

    def backup_complete?
      snap = snapshot
      case snap['state']
      when 'IN_PROGRESS', 'STARTED'
        return false
      when 'SUCCESS'
        return true
      end
      fail "Backup failed!\n" \
           "State: #{snap['state']}\n" \
           "Snapshot: #{snap.inspect}"
    end

    def verify_create!
      # Check the status of the backup for up to an hour
      backup_start_time = Time.now.utc
      until Time.now.utc > (backup_start_time + @backup_timeout)
        return true if backup_complete?
        # Don't hammer the status endpoint
        sleep 15
      end

      fail 'Create timed out'
    end

    def make_new_backup
      # Make a backup (full on new month, incremental otherwise)
      logger.info "Starting a new backup (#{@monthly_snap_url}/#{@datetime})…"
      es_api :put, "#{@monthly_snap_url}/#{@datetime}"

      # Give the new backup time to show up
      sleep 5

      verify_create!
    end

    def restore_test_index
      # Restore just the backup_test index to a new index
      logger.info 'Restoring the backup_test index…'
      es_api(
        :post,
        "#{@monthly_snap_url}/#{@datetime}/_restore",
        parameters: {
          indices: @backup_index,
          rename_pattern: @backup_index,
          rename_replacement: @restore_index
        }.to_json
      )
      verify_restored_index!
    end

    def index_shards(index)
      r = es_api(:get, "#{@url}/#{index}/_status")
      fail "Index #{index} not found" if r.code == 404
      r.body.fetch('indices', {}).fetch(index, {})['shards']
    end

    def index_online?(index)
      shards = index_shards(index)
      shards && shards.select { |_k, v| v.find { |n| n['state'] != 'STARTED' } }.empty?
    end

    def index_item(index, id)
      es_api(:get, "#{@url}/#{index}/dummy/#{id}").body
    end

    def wait_for_index(index)
      until index_online? index
        logger.info 'Waiting for restored index to be available…'
        sleep 1
      end
    end

    def compare_index_item!(i)
      # Loop until the restored version is available
      until index_item(@restore_index, i)['found']
        logger.info 'Waiting for restored index to be available…'
        sleep 1
      end

      backup_item = index_item(@backup_index, i)['_source']['test_value']
      restore_item = index_item(@restore_index, i)['_source']['test_value']

      (backup_item == restore_item) ||
        fail("Item #{i} in test restore doesn’t match.\n" \
             "Original: #{backup_item}\n" \
             "Restored: #{restore_item}")
    end

    def verify_restored_index!
      # Compare each doc in the original backup_test index to the restored index

      logger.info "Verifying the newly-restored #{@backup_index}…"
      wait_for_index @restore_index

      conf['test_size'].times { |i| compare_index_item! i }

      logger.info 'Successfully verified the test data!'
    end

    # rubocop:disable Metrics/AbcSize, Lint/RescueException
    def run
      unless master?
        logger.info 'This node is not the currently elected master, aborting ' \
                    'backup.'
        exit 0
      end

      configure_sentry!

      # Remove the previous `restore_test` index, to avoid a race condition
      # with checking the restored copy of this index
      delete_index @restore_index if index? @restore_index

      insert_test_data

      # Create a new repo if none exists (typically at beginning of month)
      create_repo unless repo?
      make_new_backup
      restore_test_index

      remove_expired_backups
      logger.info 'Finished'
    rescue Exception => e # Need to rescue "Exception" so that Sentry gets it
      notify e if conf['env'] == 'prod'
      logger.fatal e.message
      logger.fatal e.backtrace
      Raven.capture_exception(e)
      raise e
    end
    # rubocop:enable Metrics/AbcSize, Lint/RescueException
  end
end
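The 4-line exe/es_s3_backup executable is not expanded in this listing; given the class above, its assumed shape is a thin wrapper along these lines:

#!/usr/bin/env ruby
# Assumed contents of exe/es_s3_backup (collapsed in this diff):
# load the library and run a single backup cycle.
require 'elasticsearch_s3_backup'

EverTools::ElasticsearchS3Backup.new.run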
data/lib/elasticsearch_s3_backup/version.rb
ADDED
metadata
ADDED
@@ -0,0 +1,172 @@
--- !ruby/object:Gem::Specification
name: elasticsearch_s3_backup
version: !ruby/object:Gem::Version
  version: 1.0.1
platform: ruby
authors:
- Eric Herot
autorequire:
bindir: exe
cert_chain: []
date: 2015-08-24 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: bundler
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.10'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.10'
- !ruby/object:Gem::Dependency
  name: rake
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '10.0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '10.0'
- !ruby/object:Gem::Dependency
  name: rspec
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: activesupport
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: unirest
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: faker
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: pagerduty
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: sentry-raven
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
description: ''
email:
- eric.github@herot.com
executables:
- es_s3_backup
extensions: []
extra_rdoc_files: []
files:
- ".gitignore"
- ".rspec"
- ".travis.yml"
- Gemfile
- LICENSE.txt
- README.md
- Rakefile
- bin/console
- bin/setup
- elasticsearch_s3_backup.gemspec
- exe/es_s3_backup
- lib/elasticsearch_s3_backup.rb
- lib/elasticsearch_s3_backup/version.rb
homepage: https://nerds.evertrue.com
licenses:
- MIT
metadata:
  allowed_push_host: https://rubygems.org
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubyforge_project:
rubygems_version: 2.4.5
signing_key:
specification_version: 4
summary: Backs up ElasticSearch to S3
test_files: []
has_rdoc: