elasticsearch-transport 7.14.0 → 7.16.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 88ee4e1ccbc2d04cc4e6581a80f8ab3e0ab8f09eae398eeacd395709eb103c40
- data.tar.gz: b0314674f2e91da921549f4694a1e885d0cf51b5ca7ef9201aef3f063243c346
+ metadata.gz: 86cb8f5de5ca5fc4270376d791ea8bd03252e5bbed8494d232da2eea8512434a
+ data.tar.gz: 27a6b7bfeea33bfa6e56ddd4a01109a1864a48685b54558707fe9af15ae73948
  SHA512:
- metadata.gz: 95a622b3a2e4ab25c5b9eb4e35fab83b6dd7aae8896ccbfe75ad5c52c1f6b7b3ffe04582d5b6106c4c6f2ea9a3e9cbc7b8a6182a58fd5c1a0c1b65168547b35b
- data.tar.gz: 8d59c0dfd2bd0de4744e59ebba0d6e600bbd39f8e13486e901fb4e01f6a221477c7dd0205f3201767b26dd5ec5d0c6133210fb4a7aba37553f1c4ed4c5f56d08
+ metadata.gz: d2c129c4eeeb76a83d3fea12764ec135ad768920cd20c7c447a768708ab324db32482bdf616197e0674fe72a62ec74422b35774117a2d22cd0de8cc1520df20d
+ data.tar.gz: 586f14ab09c77a7a257667131d692dfd056e06ea66ccd3f53194985113043dec32e0fab5cf7fb3ed3d1a84036df02127df051c589ca137c6412ea8456a238a68
@@ -26,12 +26,12 @@ Gem::Specification.new do |s|
  s.authors = ['Karel Minarik']
  s.email = ['karel.minarik@elasticsearch.org']
  s.summary = 'Ruby client for Elasticsearch.'
- s.homepage = 'https://www.elastic.co/guide/en/elasticsearch/client/ruby-api/7.x/index.html'
+ s.homepage = 'https://www.elastic.co/guide/en/elasticsearch/client/ruby-api/7.16/index.html'
  s.license = 'Apache-2.0'
  s.metadata = {
- 'homepage_uri' => 'https://www.elastic.co/guide/en/elasticsearch/client/ruby-api/7.x/index.html',
- 'changelog_uri' => 'https://github.com/elastic/elasticsearch-ruby/blob/7.x/CHANGELOG.md',
- 'source_code_uri' => 'https://github.com/elastic/elasticsearch-ruby/tree/7.x/elasticsearch-transport',
+ 'homepage_uri' => 'https://www.elastic.co/guide/en/elasticsearch/client/ruby-api/7.16/index.html',
+ 'changelog_uri' => 'https://github.com/elastic/elasticsearch-ruby/blob/7.16/CHANGELOG.md',
+ 'source_code_uri' => 'https://github.com/elastic/elasticsearch-ruby/tree/7.16/elasticsearch-transport',
  'bug_tracker_uri' => 'https://github.com/elastic/elasticsearch-ruby/issues'
  }
  s.files = `git ls-files`.split($/)
@@ -92,6 +92,8 @@ module Elasticsearch
  #
  # @option arguments [Boolean,Number] :retry_on_failure Retry X times when request fails before raising an
  # exception (false by default)
+ # @option arguments [Number] :delay_on_retry Delay in milliseconds between each retry (0 by default)
+ #
  # @option arguments Array<Number> :retry_on_status Retry when specific status codes are returned
  #
  # @option arguments [Boolean] :reload_on_failure Reload connections after failure (false by default)
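
The two retry options above are meant to be used together: `retry_on_failure` bounds the number of attempts, `delay_on_retry` spaces them out. A minimal usage sketch (the host and values are illustrative, not taken from this changeset):

    # Retry a failed request up to 3 times, waiting 100 ms between attempts.
    client = Elasticsearch::Transport::Client.new(
      host: 'http://localhost:9200',
      retry_on_failure: 3,
      delay_on_retry: 100   # milliseconds; the transport converts this to seconds before sleeping
    )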
@@ -126,6 +128,7 @@ module Elasticsearch
  # if you're using X-Opaque-Id
  # @option enable_meta_header [Boolean] :enable_meta_header Enable sending the meta data header to Cloud.
  # (Default: true)
+ # @option ca_fingerprint [String] :ca_fingerprint provide this value to only trust certificates that are signed by a specific CA certificate
  #
  # @yield [faraday] Access and configure the `Faraday::Connection` instance directly with a block
  #
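
A minimal usage sketch for the new `ca_fingerprint` option (the host and fingerprint value are placeholders):

    client = Elasticsearch::Transport::Client.new(
      host: 'https://localhost:9200',
      ca_fingerprint: '64F2593F...'  # SHA-256 fingerprint of the CA certificate, hex digits; colons and case are ignored
    )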
@@ -136,6 +139,7 @@ module Elasticsearch
  @arguments[:tracer] ||= @arguments[:trace] ? DEFAULT_TRACER.call() : nil
  @arguments[:reload_connections] ||= false
  @arguments[:retry_on_failure] ||= false
+ @arguments[:delay_on_retry] ||= 0
  @arguments[:reload_on_failure] ||= false
  @arguments[:randomize_hosts] ||= false
  @arguments[:transport_options] ||= {}
@@ -156,6 +160,7 @@ module Elasticsearch

  @send_get_body_as = @arguments[:send_get_body_as] || 'GET'
  @opaque_id_prefix = @arguments[:opaque_id_prefix] || nil
+ @ca_fingerprint = @arguments.delete(:ca_fingerprint)

  if @arguments[:request_timeout]
  @arguments[:transport_options][:request] = { timeout: @arguments[:request_timeout] }
@@ -188,6 +193,7 @@ module Elasticsearch
  opaque_id = @opaque_id_prefix ? "#{@opaque_id_prefix}#{opaque_id}" : opaque_id
  headers.merge!('X-Opaque-Id' => opaque_id)
  end
+ validate_ca_fingerprints if @ca_fingerprint
  transport.perform_request(method, path, params, body, headers)
  end

@@ -211,6 +217,31 @@ module Elasticsearch
  )
  end

+ def validate_ca_fingerprints
+ transport.connections.connections.each do |connection|
+ unless connection.host[:scheme] == 'https'
+ raise Elasticsearch::Transport::Transport::Error, 'CA fingerprinting can\'t be configured over http'
+ end
+
+ next if connection.verified
+
+ ctx = OpenSSL::SSL::SSLContext.new
+ socket = TCPSocket.new(connection.host[:host], connection.host[:port])
+ ssl = OpenSSL::SSL::SSLSocket.new(socket, ctx)
+ ssl.connect
+ cert_store = ssl.peer_cert_chain
+ matching_certs = cert_store.select do |cert|
+ OpenSSL::Digest::SHA256.hexdigest(cert.to_der).upcase == @ca_fingerprint.upcase.gsub(':', '')
+ end
+ if matching_certs.empty?
+ raise Elasticsearch::Transport::Transport::Error,
+ 'Server certificate CA fingerprint does not match the value configured in ca_fingerprint'
+ end
+
+ connection.verified = true
+ end
+ end
+
  def add_header(header)
  headers = @arguments[:transport_options]&.[](:headers) || {}
  headers.merge!(header)
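
The fingerprint compared above is the SHA-256 digest of the CA certificate in DER form, matched case-insensitively with any colons stripped. A minimal sketch of producing a matching value from a local CA file (the `./ca.crt` path is an assumption for illustration):

    require 'openssl'

    # Assumes the cluster's CA certificate is available locally as ./ca.crt
    ca_cert = OpenSSL::X509::Certificate.new(File.read('./ca.crt'))
    fingerprint = OpenSSL::Digest::SHA256.hexdigest(ca_cert.to_der)

    client = Elasticsearch::Transport::Client.new(
      host: 'https://localhost:9200',
      ca_fingerprint: fingerprint
    )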
@@ -54,6 +54,7 @@ module Elasticsearch
  @options = arguments[:options] || {}
  @options[:http] ||= {}
  @options[:retry_on_status] ||= []
+ @options[:delay_on_retry] ||= 0

  @block = block
  @compression = !!@options[:compression]
@@ -223,7 +224,7 @@ module Elasticsearch
  # @api private
  #
  def __convert_to_json(o=nil, options={})
- o = o.is_a?(String) ? o : serializer.dump(o, options)
+ o.is_a?(String) ? o : serializer.dump(o, options)
  end

  # Returns a full URL based on information from host
@@ -264,6 +265,7 @@ module Elasticsearch
  start = Time.now
  tries = 0
  reload_on_failure = opts.fetch(:reload_on_failure, @options[:reload_on_failure])
+ delay_on_retry = opts.fetch(:delay_on_retry, @options[:delay_on_retry])

  max_retries = if opts.key?(:retry_on_failure)
  opts[:retry_on_failure] === true ? DEFAULT_MAX_RETRIES : opts[:retry_on_failure]
@@ -272,10 +274,10 @@ module Elasticsearch
  end

  params = params.clone
-
  ignore = Array(params.delete(:ignore)).compact.map { |s| s.to_i }

  begin
+ sleep(delay_on_retry / 1000.0) if tries > 0
  tries += 1
  connection = get_connection or raise Error.new('Cannot get new connection from pool.')

@@ -284,9 +286,7 @@ module Elasticsearch
  end

  url = connection.full_url(path, params)
-
  response = block.call(connection, url)
-
  connection.healthy! if connection.failures > 0

  # Raise an exception so we can catch it for `retry_on_status`
@@ -309,7 +309,6 @@ module Elasticsearch
  log_error "[#{e.class}] #{e.message} #{connection.host.inspect}"

  connection.dead!
-
  if reload_on_failure and tries < connections.all.size
  log_warn "[#{e.class}] Reloading connections (attempt #{tries} of #{connections.all.size})"
  reload_connections! and retry
@@ -336,14 +335,10 @@ module Elasticsearch
  duration = Time.now - start

  if response.status.to_i >= 300
- __log_response method, path, params, body, url, response, nil, 'N/A', duration
- __trace method, path, params, connection.connection.headers, body, url, response, nil, 'N/A', duration if tracer
-
+ __log_response(method, path, params, body, url, response, nil, 'N/A', duration)
+ __trace(method, path, params, connection_headers(connection), body, url, response, nil, 'N/A', duration) if tracer
  # Log the failure only when `ignore` doesn't match the response status
- unless ignore.include?(response.status.to_i)
- log_fatal "[#{response.status}] #{response.body}"
- end
-
+ log_fatal "[#{response.status}] #{response.body}" unless ignore.include?(response.status.to_i)
  __raise_transport_error response unless ignore.include?(response.status.to_i)
  end

@@ -354,10 +349,8 @@ module Elasticsearch
  __log_response method, path, params, body, url, response, json, took, duration
  end

- __trace method, path, params, connection.connection.headers, body, url, response, nil, 'N/A', duration if tracer
-
- warnings(response.headers['warning']) if response.headers&.[]('warning')
-
+ __trace(method, path, params, connection_headers(connection), body, url, response, nil, 'N/A', duration) if tracer
+ log_warn(response.headers['warning']) if response.headers&.[]('warning')
  Response.new response.status, json || response.body, response.headers
  ensure
  @last_request_at = Time.now
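
With this change, deprecation warnings returned in the `warning` response header are routed through the transport's logger (`log_warn`) instead of being printed with `Kernel#warn`. A sketch of wiring up a logger to receive them, assuming the standard-library Logger (names are illustrative):

    require 'logger'

    client = Elasticsearch::Transport::Client.new(
      host: 'http://localhost:9200',
      logger: Logger.new($stdout)  # 'warning' response headers now arrive via logger.warn
    )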
@@ -376,17 +369,38 @@ module Elasticsearch

  USER_AGENT_STR = 'User-Agent'.freeze
  USER_AGENT_REGEX = /user\-?\_?agent/
+ ACCEPT_ENCODING = 'Accept-Encoding'.freeze
+ CONTENT_ENCODING = 'Content-Encoding'.freeze
  CONTENT_TYPE_STR = 'Content-Type'.freeze
  CONTENT_TYPE_REGEX = /content\-?\_?type/
  DEFAULT_CONTENT_TYPE = 'application/json'.freeze
  GZIP = 'gzip'.freeze
- ACCEPT_ENCODING = 'Accept-Encoding'.freeze
  GZIP_FIRST_TWO_BYTES = '1f8b'.freeze
  HEX_STRING_DIRECTIVE = 'H*'.freeze
  RUBY_ENCODING = '1.9'.respond_to?(:force_encoding)

+ def compress_request(body, headers)
+ if body
+ headers ||= {}
+
+ if gzipped?(body)
+ headers[CONTENT_ENCODING] = GZIP
+ elsif use_compression?
+ headers[CONTENT_ENCODING] = GZIP
+ gzip = Zlib::GzipWriter.new(StringIO.new)
+ gzip << body
+ body = gzip.close.string
+ else
+ headers.delete(CONTENT_ENCODING)
+ end
+ elsif headers
+ headers.delete(CONTENT_ENCODING)
+ end
+
+ [body, headers]
+ end
+
  def decompress_response(body)
- return body unless use_compression?
  return body unless gzipped?(body)

  io = StringIO.new(body)
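
The new `compress_request` helper is called by the HTTP implementations further down to gzip outgoing bodies and set `Content-Encoding` when compression is enabled on the client. A brief sketch of turning it on (host is a placeholder, and this assumes the `compression` client option is passed through to the transport as exercised in the Accept-Encoding specs below):

    client = Elasticsearch::Transport::Client.new(
      host: 'http://localhost:9200',
      compression: true  # request bodies are gzipped; Accept-Encoding is set to gzip
    )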
@@ -399,6 +413,8 @@ module Elasticsearch
  end

  def gzipped?(body)
+ return unless body && !body.empty?
+
  body[0..1].unpack(HEX_STRING_DIRECTIVE)[0] == GZIP_FIRST_TWO_BYTES
  end

@@ -432,8 +448,12 @@ module Elasticsearch
  end
  end

- def warnings(warning)
- warn("warning: #{warning}")
+ def connection_headers(connection)
+ if defined?(Elasticsearch::Transport::Transport::HTTP::Manticore) && self.class == Elasticsearch::Transport::Transport::HTTP::Manticore
+ @request_options[:headers]
+ else
+ connection.connection.headers
+ end
  end
  end
  end
@@ -33,6 +33,7 @@ module Elasticsearch
  DEFAULT_RESURRECT_TIMEOUT = 60

  attr_reader :host, :connection, :options, :failures, :dead_since
+ attr_accessor :verified

  # @option arguments [Hash] :host Host information (example: `{host: 'localhost', port: 9200}`)
  # @option arguments [Object] :connection The transport-specific physical connection or "session"
@@ -42,6 +43,7 @@ module Elasticsearch
  @host = arguments[:host].is_a?(Hash) ? Redacted.new(arguments[:host]) : arguments[:host]
  @connection = arguments[:connection]
  @options = arguments[:options] || {}
+ @verified = false
  @state_mutex = Mutex.new

  @options[:resurrect_timeout] ||= DEFAULT_RESURRECT_TIMEOUT
@@ -153,7 +155,6 @@ module Elasticsearch
  "<#{self.class.name} host: #{host} (#{dead? ? 'dead since ' + dead_since.to_s : 'alive'})>"
  end
  end
-
  end
  end
  end
@@ -19,29 +19,31 @@ module Elasticsearch
  module Transport
  module Transport
  module HTTP
-
  # Alternative HTTP transport implementation, using the [_Curb_](https://rubygems.org/gems/curb) client.
  #
  # @see Transport::Base
  #
  class Curb
  include Base
-
  # Performs the request by invoking {Transport::Base#perform_request} with a block.
  #
  # @return [Response]
  # @see Transport::Base#perform_request
  #
  def perform_request(method, path, params={}, body=nil, headers=nil, opts={})
- super do |connection, url|
+ super do |connection, _url|
  connection.connection.url = connection.full_url(path, params)
+ body = body ? __convert_to_json(body) : nil
+ body, headers = compress_request(body, headers)

  case method
  when 'HEAD'
  connection.connection.set :nobody, true
  when 'GET', 'POST', 'PUT', 'DELETE'
  connection.connection.set :nobody, false
- connection.connection.put_data = __convert_to_json(body) if body
+
+ connection.connection.put_data = body if body
+
  if headers
  if connection.connection.headers
  connection.connection.headers.merge!(headers)
@@ -44,12 +44,13 @@ module Elasticsearch
  headers
  end

- response = connection.connection.run_request(
- method.downcase.to_sym,
- url,
- (body ? __convert_to_json(body) : nil),
- headers
- )
+ body = body ? __convert_to_json(body) : nil
+ body, headers = compress_request(body, headers)
+
+ response = connection.connection.run_request(method.downcase.to_sym,
+ url,
+ body,
+ headers)

  Response.new response.status, decompress_response(response.body), response.headers
  end
@@ -62,7 +63,7 @@ module Elasticsearch
  def __build_connection(host, options={}, block=nil)
  client = ::Faraday.new(__full_url(host), options, &block)
  apply_headers(client, options)
- Connections::Connection.new :host => host, :connection => client
+ Connections::Connection.new(host: host, connection: client)
  end

  # Returns an array of implementation specific connection errors.
@@ -63,6 +63,7 @@ module Elasticsearch
  include Base

  def initialize(arguments={}, &block)
+ @request_options = { headers: (arguments.dig(:transport_options, :headers) || {}) }
  @manticore = build_client(arguments[:options] || {})
  super(arguments, &block)
  end
@@ -82,7 +83,10 @@ module Elasticsearch
  #
  def perform_request(method, path, params={}, body=nil, headers=nil, opts={})
  super do |connection, url|
- params[:body] = __convert_to_json(body) if body
+ body = body ? __convert_to_json(body) : nil
+ body, headers = compress_request(body, @request_options[:headers])
+
+ params[:body] = body if body
  params[:headers] = headers if headers
  params = params.merge @request_options
  case method
@@ -109,7 +113,6 @@ module Elasticsearch
  # @return [Connections::Collection]
  #
  def __build_connections
- @request_options = {}
  apply_headers(@request_options, options[:transport_options])
  apply_headers(@request_options, options)

@@ -155,11 +158,11 @@ module Elasticsearch
  private

  def apply_headers(request_options, options)
- headers = (options && options[:headers]) || {}
+ headers = options&.[](:headers) || {}
  headers[CONTENT_TYPE_STR] = find_value(headers, CONTENT_TYPE_REGEX) || DEFAULT_CONTENT_TYPE
  headers[USER_AGENT_STR] = find_value(headers, USER_AGENT_REGEX) || user_agent_header
  headers[ACCEPT_ENCODING] = GZIP if use_compression?
- request_options.merge!(headers: headers)
+ request_options[:headers].merge!(headers)
  end

  def user_agent_header
@@ -17,6 +17,6 @@

  module Elasticsearch
  module Transport
- VERSION = '7.14.0'.freeze
+ VERSION = '7.16.0'.freeze
  end
  end
@@ -18,6 +18,7 @@
  require 'uri'
  require 'time'
  require 'timeout'
+ require 'zlib'
  require 'multi_json'
  require 'faraday'

@@ -1466,28 +1466,19 @@ describe Elasticsearch::Transport::Client do
  end

  context 'when Elasticsearch response includes a warning header' do
+ let(:logger) { double('logger', warn: '', warn?: '', info?: '', info: '', debug?: '', debug: '') }
  let(:client) do
- Elasticsearch::Transport::Client.new(hosts: hosts)
+ Elasticsearch::Transport::Client.new(hosts: hosts, logger: logger)
  end

  let(:warning) { 'Elasticsearch warning: "deprecation warning"' }

  it 'prints a warning' do
- allow_any_instance_of(Elasticsearch::Transport::Transport::Response).to receive(:headers) do
- { 'warning' => warning }
- end
-
- begin
- stderr = $stderr
- fake_stderr = StringIO.new
- $stderr = fake_stderr
-
- client.perform_request('GET', '/')
- fake_stderr.rewind
- expect(fake_stderr.string).to eq("warning: #{warning}\n")
- ensure
- $stderr = stderr
+ expect_any_instance_of(Faraday::Connection).to receive(:run_request) do
+ Elasticsearch::Transport::Transport::Response.new(200, {}, { 'warning' => warning })
  end
+ client.perform_request('GET', '/')
+ expect(logger).to have_received(:warn).with(warning)
  end
  end

@@ -1668,6 +1659,29 @@ describe Elasticsearch::Transport::Client do
  end
  end

+ context 'when retry_on_failure is true and delay_on_retry is specified' do
+ context 'when a node is unreachable' do
+ let(:hosts) do
+ [ELASTICSEARCH_HOSTS.first, "foobar1", "foobar2"]
+ end
+
+ let(:options) do
+ { retry_on_failure: true, delay_on_retry: 3000 }
+ end
+
+ let(:responses) do
+ 5.times.collect do
+ client.perform_request('GET', '_nodes/_local')
+ end
+ end
+
+ it 'retries on failure' do
+ allow_any_instance_of(Object).to receive(:sleep).with(3000 / 1000)
+ expect(responses.all? { true }).to be(true)
+ end
+ end
+ end
+
  context 'when reload_on_failure is true' do

  let(:hosts) do
@@ -1727,7 +1741,7 @@ describe Elasticsearch::Transport::Client do
  end

  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end

  it 'preserves the other headers' do
@@ -1746,7 +1760,7 @@ describe Elasticsearch::Transport::Client do
  end

  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end

  it 'preserves the other headers' do
@@ -1765,7 +1779,7 @@ describe Elasticsearch::Transport::Client do
  end

  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end

  it 'preserves the other headers' do
@@ -1784,7 +1798,7 @@ describe Elasticsearch::Transport::Client do
  end

  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end

  it 'preserves the other headers' do
@@ -1803,7 +1817,7 @@ describe Elasticsearch::Transport::Client do
  end

  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end

  it 'preserves the other headers' do
@@ -1827,7 +1841,7 @@ describe Elasticsearch::Transport::Client do
  end

  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end

  it 'preserves the other headers' do
@@ -1895,7 +1909,6 @@ describe Elasticsearch::Transport::Client do
  end

  context 'when request headers are specified' do
-
  let(:response) do
  client.perform_request('GET', '/', {}, nil, { 'Content-Type' => 'application/yaml' })
  end
@@ -1906,9 +1919,7 @@ describe Elasticsearch::Transport::Client do
  end

  describe 'selector' do
-
  context 'when the round-robin selector is used' do
-
  let(:nodes) do
  3.times.collect do
  client.perform_request('GET', '_nodes/_local').body['nodes'].to_a[0][1]['name']
@@ -1989,4 +2000,76 @@ describe Elasticsearch::Transport::Client do
  end
  end
  end
+
+ context 'CA Fingerprinting' do
+ context 'when setting a ca_fingerprint' do
+ after do
+ File.delete('./certificate.crt')
+ File.delete('./certificate.key')
+ end
+
+ let(:certificate) do
+ system(
+ 'openssl req -new -newkey rsa:4096 -days 3650 -nodes -x509 -subj "/C=BE/O=Test/CN=Test"' \
+ ' -keyout certificate.key -out certificate.crt',
+ err: File::NULL
+ )
+ OpenSSL::X509::Certificate.new File.read('./certificate.crt')
+ end
+
+ let(:client) do
+ Elasticsearch::Transport::Client.new(
+ host: 'https://elastic:changeme@localhost:9200',
+ ca_fingerprint: OpenSSL::Digest::SHA256.hexdigest(certificate.to_der)
+ )
+ end
+
+ it 'validates CA fingerprints on perform request' do
+ expect(client.transport.connections.connections.map(&:verified).uniq).to eq [false]
+ allow(client.transport).to receive(:perform_request) { 'Hello' }
+
+ server = double('server').as_null_object
+ allow(TCPSocket).to receive(:new) { server }
+ socket = double('socket')
+ allow(OpenSSL::SSL::SSLSocket).to receive(:new) { socket }
+ allow(socket).to receive(:connect) { nil }
+ allow(socket).to receive(:peer_cert_chain) { [certificate] }
+
+ response = client.perform_request('GET', '/')
+ expect(client.transport.connections.connections.map(&:verified).uniq).to eq [true]
+ expect(response).to eq 'Hello'
+ end
+ end
+
+ context 'when using an http host' do
+ let(:client) do
+ Elasticsearch::Transport::Client.new(
+ host: 'http://elastic:changeme@localhost:9200',
+ ca_fingerprint: 'test'
+ )
+ end
+
+ it 'raises an error' do
+ expect do
+ client.perform_request('GET', '/')
+ end.to raise_exception(Elasticsearch::Transport::Transport::Error)
+ end
+ end
+
+ context 'when not setting a ca_fingerprint' do
+ let(:client) do
+ Elasticsearch::Transport::Client.new(
+ host: 'http://elastic:changeme@localhost:9200'
+ )
+ end
+
+ it 'has unvalidated connections' do
+ allow(client).to receive(:validate_ca_fingerprints) { nil }
+ allow(client.transport).to receive(:perform_request) { nil }
+
+ client.perform_request('GET', '/')
+ expect(client).to_not have_received(:validate_ca_fingerprints)
+ end
+ end
+ end
  end