elastic-util 0.1.3 → 0.1.8

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
- SHA1:
- metadata.gz: 86b2acc63f7a0ca1b6b9664c0f88f124e87e93cd
- data.tar.gz: c96da7fa8d85ba9c2c3555d61165e4804ef6b6ea
+ SHA256:
+ metadata.gz: b30dc1de09489fede6a09227ff9a8a292f416fdcf8e9eb041bcb46c08cbd6767
+ data.tar.gz: 5b604aafcae78e703530acec175bf1235178cb784711c2bcb8a665b9849c7792
  SHA512:
- metadata.gz: f34401fb4c4e3b103a1943fc6a5911a0ae8f6289679c86c403c4737fb60c62501cfc7d563b92ec44dc2d8be48136e8a2f35550f53f0cc5928e3b9097827d95b9
- data.tar.gz: ca82e26dc8f73e6ef7437655ded818727a65934797b94f79500db39021ed453362f5fa793ecb4c607721e30a173d5bb367e2311239f9dd53e2eb56de04862c23
+ metadata.gz: 4d4f94be2ca1d450ea260ab4d6f8450e8dfb8025f52f72281a932dc33480fdc0a965175166d9aa50000d386a59c8b65347b02aac6472e68432f663798f2d2e0b
+ data.tar.gz: '0029396b7a2bc8960f10532a8bdec303e4e3e813c8acbbab795c4dffbaf0e36cb707cf311acd363a5c34237b3ce7ea0454a37c1d261d491cc6b6919a64c6da8e'
HISTORY.md ADDED
@@ -0,0 +1,14 @@
+ # History
+
+ This is a changelog for the changes made to the `elastic-util` gem.
+
+ ### v0.1.8
+ * improved `--help` output and error messages
+
+ ### v0.1.7
+ * basic auth support inside the url argument using the format username:password@url
+ * https support
+
+ ### v0.1.6
+ * New backup option --replace-types
+
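The releases above surface in the library API roughly as follows; a minimal sketch, assuming placeholder credentials, host, directory, and type names (none of these values come from the gem itself):

    require 'elastic-util'

    # v0.1.7: https plus basic auth credentials embedded in the url argument.
    # v0.1.6: :replace_types maps old document types to new ones during backup
    #         (the CLI --replace-types flag builds this hash).
    ElasticUtil.backup('https://backup_user:secret@es.example.com:9200',  # hypothetical user, password, host
                       '/tmp/local-elastic-data',
                       {:size => 1000, :replace_types => {'stat' => '_doc'}})  # hypothetical type mapping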
bin/elastic-util CHANGED
@@ -3,17 +3,25 @@ require 'elastic-util'
  require 'optparse'
 
  prog_name = "elastic-util" # $0.split('/').last
- usage = "Usage: #{prog_name} [backup|restore] [options]"
+ prog_usage = "usage: #{prog_name} [backup|restore] [url] [directory] [options]"
  args = ARGV.dup
  command_name = args.shift
 
  case command_name
 
  when "backup"
-
+ banner = <<-EOT
+ usage: #{prog_name} #{command_name} [url] [directory] [options]
+ EOT
+ footer = <<-EOT
+ The backup command provides a way to dump Elasticsearch index data.
+ The /_search API is used to paginate requests for each index being exported.
+ The data is written to the specified directory, with a file for each api request made.
+ Example: #{prog_name} #{command_name} http://localhost:9300 /tmp/local-elastic-data
+ EOT
  options = {}
  optparse = OptionParser.new do |opts|
- opts.banner = "Usage: #{prog_name} #{command_name} [url] [directory] [options]"
+ opts.banner = banner
  opts.on('--indices x,y,z', Array, "The indices to backup. Default is all.") do |val|
  options[:indices] = val.collect {|it| it.strip }
  end
@@ -23,6 +31,15 @@ when "backup"
  opts.on('--exclude-fields x,y,z', Array, "The fields to exclude from backup. Default is '_id'.") do |val|
  options[:exclude_fields] = val.collect {|it| it.strip }
  end
+ opts.on('--replace-types type1:_doc,type2:_doc', Array, "Replace certain types with a different type.") do |val|
+ options[:replace_types] = {}
+ val.each do |it|
+ pair = it.split(":").collect {|p| p.strip }
+ if pair.size == 2
+ options[:replace_types][pair[0]] = pair[1]
+ end
+ end
+ end
  opts.on( '-s', '--size NUMBER', "The size api parameter. This dictates the size of the files and api payloads. Default is 1000." ) do |val|
  options[:size] = val.to_i
  end
@@ -35,27 +52,24 @@ when "backup"
  opts.on( '-q', '--quiet', "Don't print to stdout. Default is false." ) do |val|
  options[:quiet] = true
  end
+ opts.on( '-d', '--dry-run', "Don't actually backup data, just print what would have happened. Default is false." ) do |val|
+ options[:dry] = true
+ end
  opts.on('-h', '--help', "Prints this help" ) do
- puts opts
- exit
+ puts opts, footer
+ exit 0
  end
  end
  optparse.parse!(args)
- url = args[0]
- backup_directory = args[1]
- if url.nil? || url.empty?
- $stderr.puts "#{prog_name}: missing required argument [url]"
- $stderr.puts optparse
- exit 1
- end
- if backup_directory.nil? || backup_directory.empty?
- $stderr.puts "#{prog_name}: missing required argument [directory]"
- $stderr.puts optparse
+ if args.count != 2
+ $stderr.puts "#{prog_name}: wrong number of arguments. Expected 2 and got #{args.count}: #{args.join(', ')}"
+ # $stderr.puts optparse
+ $stderr.puts banner
+ $stderr.puts "See `#{prog_name} #{command_name} --help` for more usage information."
  exit 1
  end
-
  begin
- result = ElasticUtil.backup(url, backup_directory, options)
+ result = ElasticUtil.backup(args[0], args[1], options)
  exit 0
  rescue ElasticUtil::Error => err
  $stderr.puts "#{prog_name}: #{err.message}"
@@ -65,34 +79,39 @@ when "backup"
 
 
  when "restore"
-
+ banner = <<-EOT
+ usage: #{prog_name} #{command_name} [url] [directory] [options]
+ EOT
+ footer = <<-EOT
+ The restore command provides a way to restore Elasticsearch index data.
+ The /_bulk API is used to upload index data from the specified directory.
+ The directory should contain data generated by the `#{prog_name} backup` command.
+ e.g. #{prog_name} #{command_name} http://localhost:9400 /tmp/local-elastic-data
+ EOT
  options = {}
  optparse = OptionParser.new do |opts|
- opts.banner = "Usage: #{prog_name} #{command_name} [url] [directory] [options]"
+ opts.banner = banner
  opts.on( '-q', '--quiet', "Don't print to stdout. Default is false." ) do |val|
  options[:quiet] = true
  end
+ opts.on( '-d', '--dry-run', "Don't actually restore data, just print what would have happened. Default is false." ) do |val|
+ options[:dry] = true
+ end
  opts.on('-h', '--help', "Prints this help" ) do
- puts opts
- exit
+ puts opts, footer
+ exit 0
  end
  end
  optparse.parse!(args)
- url = args[0]
- backup_directory = args[1]
- if url.nil? || url.empty?
- $stderr.puts "#{prog_name}: missing required argument [url]"
- $stderr.puts optparse
- exit 1
- end
- if backup_directory.nil? || backup_directory.empty?
- $stderr.puts "#{prog_name}: missing required argument [directory]"
- $stderr.puts optparse
+ if args.count != 2
+ $stderr.puts "#{prog_name}: wrong number of arguments. Expected 2 and got #{args.count}: #{args.join(', ')}"
+ # $stderr.puts optparse
+ $stderr.puts banner
+ $stderr.puts "See `#{prog_name} #{command_name} --help` for more usage information."
  exit 1
  end
-
  begin
- result = ElasticUtil.restore(url, backup_directory, options)
+ result = ElasticUtil.restore(args[0], args[1], options)
  exit 0
  rescue ElasticUtil::Error => err
  $stderr.puts "#{prog_name}: #{err.message}"
@@ -100,10 +119,28 @@ when "restore"
  exit 1
  end
 
- when "-v"
+ when "-h", "--help"
+ puts prog_usage
+ puts <<-EOT
+ The elastic-util command provides a way to dump and restore Elasticsearch index data.
+ Example: #{prog_name} backup http://localhost:9300 /tmp/local-elastic-data
+ #{prog_name} restore http://localhost:9400 /tmp/local-elastic-data
+ EOT
+ exit 0
+ when "-v","--version","version"
  puts ElasticUtil::VERSION
- else
- $stderr.puts usage
+ when "", nil
+ $stderr.puts "#{prog_name}: missing required argument [backup|restore]"
+ # $stderr.puts "Commands:"
+ # $stderr.puts "\tbackup"
+ # $stderr.puts "\trestore"
+ $stderr.puts prog_usage
+ $stderr.puts "See `#{prog_name} #{command_name} --help` for more usage information."
  exit 1
+ else
+ # $stderr.puts "#{prog_name}: '#{command_name}' is not a recognized elastic-util command. See `#{prog_name} --help`"
+ $stderr.puts "#{prog_name}: '#{command_name}' is not a recognized elastic-util command."
+ $stderr.puts prog_usage
+ exit 3
  end
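As a reading aid for the new --replace-types option above: the parser splits each comma-separated item on a colon and builds a hash. A standalone sketch, not code from the gem (the type names and paths are made up):

    # e.g. `elastic-util backup http://localhost:9200 /tmp/data --replace-types stat:_doc,log:_doc`
    val = ["stat:_doc", "log:_doc"]   # what OptionParser hands the block for that flag
    replace_types = {}
    val.each do |it|
      pair = it.split(":").collect { |p| p.strip }
      replace_types[pair[0]] = pair[1] if pair.size == 2
    end
    replace_types  #=> {"stat"=>"_doc", "log"=>"_doc"}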
 
elastic-util.gemspec CHANGED
@@ -9,7 +9,7 @@ Gem::Specification.new do |spec|
  spec.authors = ["James Dickson"]
  spec.email = ["dickson.james@gmail.com"]
  spec.summary = "Provides backup and restore for ElasticSearch data"
- spec.description = "ElasticUtil uses ElasticSearch's scroll and _bulk APIs to dump and restore indices"
+ spec.description = "ElasticUtil uses ElasticSearch's /_search and /_bulk APIs to dump and restore indices"
  #spec.homepage = "http://www.elastic-util.com"
  spec.license = "MIT"
 
lib/elastic-util.rb CHANGED
@@ -1,18 +1,19 @@
  # encoding: utf-8
  require 'net/http'
+ require 'openssl'
  require 'json'
  require 'fileutils'
 
- # This module provides a way to backup and restore elasticsearch data.
+ # This module provides a way to backup and restore Elasticsearch data.
  #
  # @example Backup data from one elasticsearch cluster and restore it to another.
  #
- # ElasticUtil.backup('http://localhost:9200', '/tmp/mybackup', {size:5000})
- # ElasticUtil.restore('http://localhost:9201', '/tmp/mybackup')
+ # ElasticUtil.backup('http://localhost:9300', '/tmp/local-elastic-data', {size:5000})
+ # ElasticUtil.restore('http://localhost:9301', '/tmp/local-elastic-data')
  #
  module ElasticUtil
 
- VERSION = "0.1.3"
+ VERSION = "0.1.8"
 
  # The name of the data directory, relative to the user provided backup directory.
  DUMP_DIR = "es_data"
@@ -20,29 +21,29 @@ module ElasticUtil
  # A class to be raised for any known error condition.
  class Error < StandardError; end
 
- # Backup elasticsearch data to a local directory.
+ # Backup Elasticsearch data to a local directory.
  #
  # This uses ElasticSearch's scroll api to fetch all records for indices
  # and write the data to a local directory. The files it generates are given a
  # .json.data extension. They are not valid JSON files, but rather are in the
  # format expected by ElasticSearch's _bulk api.
  #
- # So #restore simply has to POST the contents of each file.
+ # So #restore simply has to POST the contents of each file as Content-Type: application/x-ndjson
  #
- # Use the :size option to change the number or results to fetch at once,
- # and also the size of the data files generated.
- # The latter correlates to the of the the api requests made in #restore.
+ # Use the :size option to change the number of results to fetch at once and also the size of the data files generated.
+ # Increasing size means larger files and fewer api requests for both #backup and the subsequent #restore of that data.
  #
- # @example Backup default elasticsearch running locally.
+ # @example Backup default Elasticsearch running locally.
  #
- # ElasticUtil.backup('http://localhost:9200', '/tmp/mybackup')
+ # ElasticUtil.backup('http://localhost:9300', '/tmp/local-elastic-data')
  #
- # @param [String] url The url of the elasticsearch cluster eg. 'http://localhost:9200'
+ # @param [String] url The url of the Elasticsearch cluster eg. 'http://localhost:9300'
  # @param [String] backup_dir The local directory to store data in. eg. '/tmp/es2.4'
  # @param [Hash] opts The options for this backup.
  # @option opts [Array] :indices The indices to backup. Default is all.
  # @option opts [Array] :exclude_indices Exclude certain indexes.
  # @option opts [Array] :exclude_fields Exclude certain fields. Default is ['_id'].
+ # @option opts [Hash] :replace_types Replace certain types with a different type. eg. {'type1' => 'type2'} or {'stat' => '_doc'}
  # @option opts [String] :scroll The scroll api parameter, Default is '5m'.
  # @option opts [Integer] :size The size api parameter. Default is 1000.
  # @option opts [true] :force Delete existing backup directory instead of erroring. Default is false.
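To make the .json.data format described above concrete: each hit becomes two newline-delimited JSON lines, an action line followed by a source line, which is the shape the /_bulk api consumes. A small sketch with a made-up document:

    require 'json'

    hit = { '_index' => 'logs', '_type' => '_doc', '_id' => '1',
            '_source' => { 'message' => 'hello' } }   # hypothetical search hit
    action_json = { 'index' => { '_index' => hit['_index'], '_type' => hit['_type'], '_id' => hit['_id'] } }
    # Two lines per record, as written by save_bulk_data:
    puts JSON.generate(action_json) + "\n" + JSON.generate(hit['_source'])
    # {"index":{"_index":"logs","_type":"_doc","_id":"1"}}
    # {"message":"hello"}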
@@ -52,29 +53,32 @@ module ElasticUtil
  #
  def self.backup(url, backup_dir, opts={})
  start_time = Time.now
+ url = url.strip.chomp("/")
  backup_dir = backup_dir.strip
  path = File.join(backup_dir.strip, DUMP_DIR)
  indices = []
-
+
+ if opts[:dry]
+ puts "(DRY RUN) Started backup" unless opts[:quiet]
+ else
+ puts "Started backup" unless opts[:quiet]
+ end
+
  # ping it first
+ response = nil
  uri = URI(url)
- response = Net::HTTP.get_response(uri)
- http = Net::HTTP.new(uri.host, uri.port)
- http.read_timeout = 5
- http.open_timeout = 5
- response = http.start() {|http|
- http.get("/")
- }
+ response = api_get(uri)
  if !response.is_a?(Net::HTTPSuccess)
- raise Error, "Unable to reach elasticsearch at url '#{url}'!\n#{response.inspect}\n#{response.body.to_s}"
+ raise Error, "Unable to reach Elasticsearch at url '#{url}'!\n#{response.inspect}\n#{response.body.to_s}"
  end
 
  # determine indices to backup, default is everything.
  if opts[:indices]
  indices = opts[:indices]
  else
+ response = nil
  uri = URI(url + "/_cat/indices?format=json")
- response = Net::HTTP.get_response(uri)
+ response = api_get(uri)
  if !response.is_a?(Net::HTTPSuccess)
  raise Error, "HTTP request failure!\n#{response.inspect}\n#{response.body.to_s}"
  end
@@ -109,6 +113,14 @@ module ElasticUtil
  end
  FileUtils.mkdir_p(path)
 
+ if opts[:dry]
+ indices.each_with_index do |index_name, i|
+ puts "(#{i+1}/#{indices.size}) backing up index #{index_name}" unless opts[:quiet]
+ end
+ puts "(DRY RUN) Finished backup of Elasticsearch #{url} to directory #{backup_dir} (took #{(Time.now-start_time).round(3)}s)" unless opts[:quiet]
+ return 0
+ end
+
  # dump data
  indices.each_with_index do |index_name, i|
  puts "(#{i+1}/#{indices.size}) backing up index #{index_name}" unless opts[:quiet]
@@ -123,7 +135,7 @@ module ElasticUtil
  }
  uri.query = URI.encode_www_form(params)
  # puts "HTTP REQUEST #{uri.inspect}"
- response = Net::HTTP.get_response(uri)
+ response = api_get(uri)
  if !response.is_a?(Net::HTTPSuccess)
  raise Error, "HTTP request failure!\n#{response.inspect}\n#{response.body.to_s}"
  end
@@ -131,7 +143,7 @@ module ElasticUtil
  raise Error, "No scroll_id returned in response:\n#{response.inspect}" unless json_response['_scroll_id']
  scroll_id = json_response['_scroll_id']
  hits = json_response['hits']['hits']
- save_bulk_data(path, hits)
+ save_bulk_data(path, hits, nil, opts)
 
  file_index = 1
  # scroll requests
@@ -143,7 +155,7 @@ module ElasticUtil
  }
  uri.query = URI.encode_www_form(params)
  # puts "HTTP REQUEST #{uri.inspect}"
- response = Net::HTTP.get_response(uri)
+ response = api_get(uri)
  if !response.is_a?(Net::HTTPSuccess)
  raise Error, "HTTP request failure!\n#{response.inspect}\n#{response.body.to_s}"
  end
@@ -151,27 +163,23 @@ module ElasticUtil
  raise Error, "No scroll_id returned in response:\n#{response.inspect}\n#{response.body.to_s}" unless json_response['_scroll_id']
  scroll_id = json_response['_scroll_id']
  hits = json_response['hits']['hits']
- if file_index > 0
- save_bulk_data(path, hits, file_index)
- else
- save_bulk_data(path, hits)
- end
+ save_bulk_data(path, hits, file_index, opts)
  file_index += 1
  end
  end
 
- puts "Finished backup of elasticsearch #{url} to directory #{backup_dir} (took #{(Time.now-start_time).round(3)}s)" unless opts[:quiet]
+ puts "Finished backup of Elasticsearch #{url} to directory #{backup_dir} (took #{(Time.now-start_time).round(3)}s)" unless opts[:quiet]
  return true
  end
 
- # Restore elasticsearch data from a backup.
+ # Restore Elasticsearch data from a backup.
  # This will do a POST to the _bulk api for each file in the backup directory.
  #
  # @example Restore local cluster with our backup.
  #
- # ElasticUtil.restore('http://localhost:9201', '/tmp/mybackup')
+ # ElasticUtil.restore('http://localhost:9301', '/tmp/local-elastic-data')
  #
- # @param [String] url The url of the elasticsearch cluster eg. 'http://localhost:9200'.
+ # @param [String] url The url of the Elasticsearch cluster eg. 'http://localhost:9200'.
  # @param [String] backup_dir The backup directory.
  # @param [Hash] opts The options for this backup.
  # @option opts [true] :quiet Don't print anything. Default is false.
@@ -180,9 +188,16 @@ module ElasticUtil
  #
  def self.restore(url, backup_dir, opts={})
  start_time = Time.now
+ url = url.strip.chomp("/")
  backup_dir = backup_dir.strip
  path = File.join(backup_dir.strip, DUMP_DIR)
 
+ if opts[:dry]
+ puts "(DRY RUN) Started restore" unless opts[:quiet]
+ else
+ puts "Started restore" unless opts[:quiet]
+ end
+
  # validate backup path
  if !Dir.exists?(path)
  raise Error, "backup path '#{backup_dir}' does not exist!"
@@ -190,16 +205,9 @@ module ElasticUtil
 
  # ping it first
  uri = URI(url)
- response = Net::HTTP.get_response(uri)
- http = Net::HTTP.new(uri.host, uri.port)
- http.read_timeout = 5
- http.open_timeout = 5
- response = http.start() {|http|
- http.get("/")
- }
-
+ response = api_get(uri)
  if !response.is_a?(Net::HTTPSuccess)
- raise Error, "Unable to reach elasticsearch at url '#{url}'!\n#{response.inspect}\n#{response.body.to_s}"
+ raise Error, "Unable to reach Elasticsearch at url '#{url}'!\n#{response.inspect}\n#{response.body.to_s}"
  end
 
  # find files to import
@@ -210,19 +218,26 @@ module ElasticUtil
  puts "Found #{found_files.size} files to import" unless opts[:quiet]
  end
 
+ if opts[:dry]
+ found_files.each_with_index do |file, i|
+ puts "(#{i+1}/#{found_files.size}) bulk importing file #{file}" unless opts[:quiet]
+ end
+ puts "(DRY RUN) Finished restore of Elasticsearch #{url} with backup #{backup_dir} (took #{(Time.now-start_time).round(3)}s)" unless opts[:quiet]
+ return 0
+ end
+
  # bulk api request for each file
  found_files.each_with_index do |file, i|
  puts "(#{i+1}/#{found_files.size}) bulk importing file #{file}" unless opts[:quiet]
  payload = File.read(file)
- # uri = URI(url)
- http = Net::HTTP.new(uri.host, uri.port)
- response = http.post("/_bulk", payload)
+ uri = URI(url + "/_bulk")
+ response = api_post(uri, payload, {:headers => {"Content-Type" => "application/x-ndjson"} })
  if !response.is_a?(Net::HTTPSuccess)
  raise Error, "HTTP request failure!\n#{response.inspect}\n#{response.body.to_s}"
  end
  end
 
- puts "Finished restore of elasticsearch #{url} with backup #{backup_dir} (took #{(Time.now-start_time).round(3)}s)" unless opts[:quiet]
+ puts "Finished restore of Elasticsearch #{url} with backup #{backup_dir} (took #{(Time.now-start_time).round(3)}s)" unless opts[:quiet]
  return true
  end
 
@@ -235,8 +250,12 @@ module ElasticUtil
  FileUtils.mkdir_p(dir_name)
  file_name = File.join(dir_name, index_type) + (file_index ? "_#{file_index}" : "") + ".json.data"
  # prepare record for bulk api injection
+ doc_type = hit['_type']
+ if opts[:replace_types] && opts[:replace_types][doc_type]
+ doc_type = opts[:replace_types][doc_type]
+ end
  action_json = {'index' => {
- '_index' => hit['_index'], '_type' => hit['_type'], '_id' => hit['_id']
+ '_index' => hit['_index'], '_type' => doc_type, '_id' => hit['_id']
  } }
  source_json = hit['_source']
  if opts[:exclude_fields] && source_json
@@ -244,6 +263,7 @@ module ElasticUtil
  source_json.delete(field)
  end
  end
+
  File.open(file_name, 'a') do |file|
  file.write JSON.generate(action_json) + "\n" + JSON.generate(source_json) + "\n"
  end
@@ -251,4 +271,58 @@ module ElasticUtil
  end
  end
 
+ def self.exec_request(uri, http_method="GET", opts={})
+ # parse request URI and options
+ uri = uri.is_a?(URI) ? uri : URI(uri)
+ http_method = http_method.to_s.upcase
+ headers = opts[:headers] || {}
+ payload = opts[:payload] || opts[:body]
+ http = Net::HTTP.new(uri.host, uri.port)
+ if uri.scheme == 'https'
+ http.use_ssl = true
+ # todo: always ignore ssl errors for now, but this should be an option
+ # http.verify_mode = OpenSSL::SSL::VERIFY_PEER
+ http.verify_mode = OpenSSL::SSL::VERIFY_NONE
+ end
+ http.read_timeout = opts[:read_timeout] || (60*15)
+ http.open_timeout = opts[:open_timeout] || 10
+ request = nil
+ if http_method == "GET"
+ request = Net::HTTP::Get.new uri.request_uri
+ elsif http_method == "POST"
+ request = Net::HTTP::Post.new uri.request_uri
+ request.body = payload if payload
+ elsif http_method == "PUT"
+ request = Net::HTTP::Put.new uri.request_uri
+ request.body = payload if payload
+ elsif http_method == "DELETE"
+ request = Net::HTTP::Delete.new uri.request_uri
+ else
+ raise "HTTP method is unknown: '#{http_method}'"
+ end
+ # set headers
+ headers.each { |k,v| request[k] = v }
+ # todo: set default Accept: application/json (probably, right?)
+ # set default Content-Type
+ if payload && headers['Content-Type'].nil?
+ headers['Content-Type'] = "application/json"
+ end
+ # set basic auth
+ if uri.user
+ request.basic_auth uri.user, uri.password
+ end
+ # execute request
+ response = http.request(request)
+ # return the resulting Net::HTTPResponse
+ return response
+ end
+
+ def self.api_get(uri, opts={})
+ exec_request(uri, "GET", opts)
+ end
+
+ def self.api_post(uri, payload, opts={})
+ exec_request(uri, "POST", opts.merge({payload:payload}))
+ end
+
  end
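A short usage sketch for the request helpers added above, assuming a placeholder https host with basic auth credentials in the URL and a hypothetical backup file path (note that exec_request currently skips TLS certificate verification):

    require 'elastic-util'

    # GET; the user:password portion of the URL is applied as basic auth.
    res = ElasticUtil.api_get('https://backup_user:secret@es.example.com:9200/_cat/indices?format=json')
    puts res.body if res.is_a?(Net::HTTPSuccess)

    # POST a bulk payload the same way #restore does; the file path here is made up.
    payload = File.read('/tmp/local-elastic-data/es_data/logs/_doc.json.data')
    ElasticUtil.api_post('https://backup_user:secret@es.example.com:9200/_bulk', payload,
                         {:headers => {"Content-Type" => "application/x-ndjson"}})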
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: elastic-util
  version: !ruby/object:Gem::Version
- version: 0.1.3
+ version: 0.1.8
  platform: ruby
  authors:
  - James Dickson
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2018-03-08 00:00:00.000000000 Z
+ date: 2020-06-22 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler
@@ -38,8 +38,8 @@ dependencies:
  - - ">="
  - !ruby/object:Gem::Version
  version: '0'
- description: ElasticUtil uses ElasticSearch's scroll and _bulk APIs to dump and restore
- indices
+ description: ElasticUtil uses ElasticSearch's /_search and /_bulk APIs to dump and
+ restore indices
  email:
  - dickson.james@gmail.com
  executables:
@@ -49,6 +49,7 @@ extra_rdoc_files: []
  files:
  - ".gitignore"
  - Gemfile
+ - HISTORY.md
  - README.md
  - Rakefile
  - bin/elastic-util
@@ -75,7 +76,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.4.8
+ rubygems_version: 2.7.6
  signing_key:
  specification_version: 4
  summary: Provides backup and restore for ElasticSearch data