elasticsearch-transport 7.13.3 → 7.17.0

This diff shows the changes between publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
@@ -15,35 +15,24 @@
  # specific language governing permissions and limitations
  # under the License.
 
- require "uri"
- require "time"
- require "timeout"
- require "multi_json"
- require "faraday"
+ require 'uri'
+ require 'time'
+ require 'timeout'
+ require 'zlib'
+ require 'multi_json'
+ require 'faraday'
 
- require "elasticsearch/transport/transport/loggable"
- require "elasticsearch/transport/transport/serializer/multi_json"
- require "elasticsearch/transport/transport/sniffer"
- require "elasticsearch/transport/transport/response"
- require "elasticsearch/transport/transport/errors"
- require "elasticsearch/transport/transport/base"
- require "elasticsearch/transport/transport/connections/selector"
- require "elasticsearch/transport/transport/connections/connection"
- require "elasticsearch/transport/transport/connections/collection"
- require "elasticsearch/transport/transport/http/faraday"
- require "elasticsearch/transport/client"
- require "elasticsearch/transport/redacted"
+ require 'elasticsearch/transport/transport/loggable'
+ require 'elasticsearch/transport/transport/serializer/multi_json'
+ require 'elasticsearch/transport/transport/sniffer'
+ require 'elasticsearch/transport/transport/response'
+ require 'elasticsearch/transport/transport/errors'
+ require 'elasticsearch/transport/transport/base'
+ require 'elasticsearch/transport/transport/connections/selector'
+ require 'elasticsearch/transport/transport/connections/connection'
+ require 'elasticsearch/transport/transport/connections/collection'
+ require 'elasticsearch/transport/transport/http/faraday'
+ require 'elasticsearch/transport/client'
+ require 'elasticsearch/transport/redacted'
 
- require "elasticsearch/transport/version"
-
- module Elasticsearch
-   module Client
-
-     # A convenience wrapper for {::Elasticsearch::Transport::Client#initialize}.
-     #
-     def new(arguments={}, &block)
-       Elasticsearch::Transport::Client.new(arguments, &block)
-     end
-     extend self
-   end
- end
+ require 'elasticsearch/transport/version'
@@ -32,14 +32,14 @@ describe Elasticsearch::Transport::Transport::Base do
  expect(logger).not_to receive(:error).with(/secret_password/)
 
  expect {
- client.cluster.stats
+ client.perform_request('GET', '_cluster/stats')
  }.to raise_exception(Faraday::ConnectionFailed)
  end
 
  it 'replaces the password with the string \'REDACTED\'' do
  expect(logger).to receive(:error).with(/REDACTED/)
  expect {
- client.cluster.stats
+ client.perform_request('GET', '_cluster/stats')
  }.to raise_exception(Faraday::ConnectionFailed)
  end
  end
@@ -65,7 +65,27 @@ describe Elasticsearch::Transport::Transport::Base do
  }
  end
 
- it_behaves_like 'a redacted string'
+ if jruby?
+ let(:client) { Elasticsearch::Transport::Client.new(arguments) }
+ let(:logger) { double('logger', fatal?: true, fatal: '') }
+
+ it 'does not include the password in the logged string' do
+ expect(logger).not_to receive(:fatal).with(/secret_password/)
+
+ expect {
+ client.perform_request('GET', '_cluster/stats')
+ }.to raise_exception(Faraday::SSLError)
+ end
+
+ it 'replaces the password with the string \'REDACTED\'' do
+ expect(logger).to receive(:fatal).with(/REDACTED/)
+ expect {
+ client.perform_request('GET', '_cluster/stats')
+ }.to raise_exception(Faraday::SSLError)
+ end
+ else
+ it_behaves_like 'a redacted string'
+ end
  end
 
  context 'when the user and password are provided in the URI object' do
@@ -75,8 +95,27 @@ describe Elasticsearch::Transport::Transport::Base do
  logger: logger
  }
  end
-
- it_behaves_like 'a redacted string'
+ if jruby?
+ let(:client) { Elasticsearch::Transport::Client.new(arguments) }
+ let(:logger) { double('logger', fatal?: true, fatal: '') }
+
+ it 'does not include the password in the logged string' do
+ expect(logger).not_to receive(:fatal).with(/secret_password/)
+
+ expect {
+ client.perform_request('GET', '_cluster/stats')
+ }.to raise_exception(Faraday::SSLError)
+ end
+
+ it 'replaces the password with the string \'REDACTED\'' do
+ expect(logger).to receive(:fatal).with(/REDACTED/)
+ expect {
+ client.perform_request('GET', '_cluster/stats')
+ }.to raise_exception(Faraday::SSLError)
+ end
+ else
+ it_behaves_like 'a redacted string'
+ end
  end
  end
 
@@ -94,7 +133,7 @@ describe Elasticsearch::Transport::Transport::Base do
  end
 
  it 'raises an exception' do
- expect { client.info }.to raise_exception(Faraday::ConnectionFailed)
+ expect { client.perform_request('GET', '/') }.to raise_exception(Faraday::ConnectionFailed)
  end
  end
 
@@ -129,7 +168,7 @@ describe Elasticsearch::Transport::Transport::Base do
 
  let(:arguments) do
  {
- hosts: ['http://localhost:9250'],
+ hosts: ELASTICSEARCH_HOSTS,
  retry_on_status: ['404']
  }
  end
@@ -24,10 +24,6 @@ describe Elasticsearch::Transport::Client do
  end
  end
 
- it 'is aliased as Elasticsearch::Client' do
- expect(Elasticsearch::Client.new).to be_a(described_class)
- end
-
  it 'has a default transport' do
  expect(client.transport).to be_a(Elasticsearch::Transport::Client::DEFAULT_TRANSPORT_CLASS)
  end
@@ -231,13 +227,14 @@ describe Elasticsearch::Transport::Client do
 
  describe 'adapter' do
  context 'when no adapter is specified' do
- let(:adapter) do
- client.transport.connections.all.first.connection.builder.adapter
- end
+ fork do
+ let(:client) { described_class.new }
+ let(:adapter) { client.transport.connections.all.first.connection.builder.adapter }
 
- it 'uses Faraday NetHttp' do
- expect(adapter).to eq Faraday::Adapter::NetHttp
- end
+ it 'uses Faraday NetHttp' do
+ expect(adapter).to eq Faraday::Adapter::NetHttp
+ end
+ end unless jruby?
  end
 
  context 'when the adapter is patron' do
@@ -1416,14 +1413,14 @@ describe Elasticsearch::Transport::Client do
  let(:client) { described_class.new(host: hosts) }
 
  it 'doesnae raise an ArgumentError' do
- expect { client.search(opaque_id: 'no_error') }.not_to raise_error
+ expect { client.perform_request('GET', '_search', opaque_id: 'no_error') }.not_to raise_error
  end
 
  it 'uses X-Opaque-Id in the header' do
  allow(client).to receive(:perform_request) { OpenStruct.new(body: '') }
- expect { client.search(opaque_id: 'opaque_id') }.not_to raise_error
+ expect { client.perform_request('GET', '_search', {}, nil, opaque_id: 'opaque_id') }.not_to raise_error
  expect(client).to have_received(:perform_request)
- .with('GET', '_search', { opaque_id: 'opaque_id' }, nil, {})
+ .with('GET', '_search', {}, nil, { opaque_id: 'opaque_id' })
  end
  end
  end
@@ -1435,10 +1432,7 @@ describe Elasticsearch::Transport::Client do
  headers = client.transport.connections.first.connection.headers
 
  expect(headers['Content-Type']).to eq('application/vnd.elasticsearch+json; compatible-with=7')
- expect(headers['Accept']).to eq('application/vnd.elasticsearch+json;compatible-with=7')
-
- response = client.perform_request('GET', '/')
- expect(response.headers['content-type']).to eq('application/json; charset=UTF-8')
+ expect(headers['Accept']).to eq('application/vnd.elasticsearch+json; compatible-with=7')
 
  ENV.delete('ELASTIC_CLIENT_APIVERSIONING')
  end
@@ -1469,28 +1463,19 @@ describe Elasticsearch::Transport::Client do
  end
 
  context 'when Elasticsearch response includes a warning header' do
+ let(:logger) { double('logger', warn: '', warn?: '', info?: '', info: '', debug?: '', debug: '') }
  let(:client) do
- Elasticsearch::Transport::Client.new(hosts: hosts)
+ Elasticsearch::Transport::Client.new(hosts: hosts, logger: logger)
  end
 
  let(:warning) { 'Elasticsearch warning: "deprecation warning"' }
 
  it 'prints a warning' do
- allow_any_instance_of(Elasticsearch::Transport::Transport::Response).to receive(:headers) do
- { 'warning' => warning }
- end
-
- begin
- stderr = $stderr
- fake_stderr = StringIO.new
- $stderr = fake_stderr
-
- client.perform_request('GET', '/')
- fake_stderr.rewind
- expect(fake_stderr.string).to eq("warning: #{warning}\n")
- ensure
- $stderr = stderr
+ expect_any_instance_of(Faraday::Connection).to receive(:run_request) do
+ Elasticsearch::Transport::Transport::Response.new(200, {}, { 'warning' => warning })
  end
+ client.perform_request('GET', '/')
+ expect(logger).to have_received(:warn).with(warning)
  end
  end
 
@@ -1500,7 +1485,7 @@ describe Elasticsearch::Transport::Client do
 
  it 'performs the request with the header' do
  allow(client).to receive(:perform_request) { OpenStruct.new(body: '') }
- expect { client.search(headers: headers) }.not_to raise_error
+ expect { client.perform_request('GET', '_search', {}, nil, headers) }.not_to raise_error
  expect(client).to have_received(:perform_request)
  .with('GET', '_search', {}, nil, headers)
  end
@@ -1514,7 +1499,7 @@ describe Elasticsearch::Transport::Client do
  )
  end
  let(:instance_headers) { { set_in_instantiation: 'header value' } }
- let(:param_headers) {{'user-agent' => 'My Ruby Tests', 'set-on-method-call' => 'header value'}}
+ let(:param_headers) { {'user-agent' => 'My Ruby Tests', 'set-on-method-call' => 'header value'} }
 
  it 'performs the request with the header' do
  expected_headers = client.transport.connections.connections.first.connection.headers.merge(param_headers)
@@ -1523,7 +1508,7 @@ describe Elasticsearch::Transport::Client do
  .to receive(:run_request)
  .with(:get, "http://#{hosts[0]}/_search", nil, expected_headers) { OpenStruct.new(body: '')}
 
- client.search(headers: param_headers)
+ client.perform_request('GET', '_search', {}, nil, param_headers)
  end
  end
  end
@@ -1560,7 +1545,6 @@ describe Elasticsearch::Transport::Client do
  end
 
  context 'when a request is made' do
-
  let!(:response) do
  client.perform_request('GET', '_cluster/health')
  end
@@ -1571,9 +1555,7 @@ describe Elasticsearch::Transport::Client do
  end
 
  describe '#initialize' do
-
  context 'when options are specified' do
-
  let(:transport_options) do
  { headers: { accept: 'application/yaml', content_type: 'application/yaml' } }
  end
@@ -1589,9 +1571,8 @@ describe Elasticsearch::Transport::Client do
  end
 
  context 'when a block is provided' do
-
  let(:client) do
- Elasticsearch::Client.new(host: ELASTICSEARCH_HOSTS.first, logger: logger) do |client|
+ described_class.new(host: ELASTICSEARCH_HOSTS.first, logger: logger) do |client|
  client.headers['Accept'] = 'application/yaml'
  end
  end
@@ -1607,7 +1588,7 @@ describe Elasticsearch::Transport::Client do
 
  context 'when the Faraday adapter is set in the block' do
  let(:client) do
- Elasticsearch::Client.new(host: ELASTICSEARCH_HOSTS.first, logger: logger) do |client|
+ described_class.new(host: ELASTICSEARCH_HOSTS.first, logger: logger) do |client|
  client.adapter(:net_http_persistent)
  end
  end
@@ -1675,6 +1656,29 @@ describe Elasticsearch::Transport::Client do
  end
  end
 
+ context 'when retry_on_failure is true and delay_on_retry is specified' do
+ context 'when a node is unreachable' do
+ let(:hosts) do
+ [ELASTICSEARCH_HOSTS.first, "foobar1", "foobar2"]
+ end
+
+ let(:options) do
+ { retry_on_failure: true, delay_on_retry: 3000 }
+ end
+
+ let(:responses) do
+ 5.times.collect do
+ client.perform_request('GET', '_nodes/_local')
+ end
+ end
+
+ it 'retries on failure' do
+ allow_any_instance_of(Object).to receive(:sleep).with(3000 / 1000)
+ expect(responses.all? { true }).to be(true)
+ end
+ end
+ end
+
  context 'when reload_on_failure is true' do
 
  let(:hosts) do
@@ -1734,7 +1738,7 @@ describe Elasticsearch::Transport::Client do
  end
 
  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end
 
  it 'preserves the other headers' do
@@ -1753,7 +1757,7 @@ describe Elasticsearch::Transport::Client do
  end
 
  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end
 
  it 'preserves the other headers' do
@@ -1772,7 +1776,7 @@ describe Elasticsearch::Transport::Client do
  end
 
  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end
 
  it 'preserves the other headers' do
@@ -1791,7 +1795,7 @@ describe Elasticsearch::Transport::Client do
  end
 
  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end
 
  it 'preserves the other headers' do
@@ -1810,7 +1814,7 @@ describe Elasticsearch::Transport::Client do
  end
 
  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end
 
  it 'preserves the other headers' do
@@ -1821,11 +1825,12 @@ describe Elasticsearch::Transport::Client do
  end
 
  context 'when using Curb as the transport', unless: jruby? do
-
  let(:client) do
- described_class.new(hosts: ELASTICSEARCH_HOSTS,
- compression: true,
- transport_class: Elasticsearch::Transport::Transport::HTTP::Curb)
+ described_class.new(
+ hosts: ELASTICSEARCH_HOSTS,
+ compression: true,
+ transport_class: Elasticsearch::Transport::Transport::HTTP::Curb
+ )
  end
 
  it 'compresses the request and decompresses the response' do
@@ -1833,7 +1838,7 @@ describe Elasticsearch::Transport::Client do
  end
 
  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end
 
  it 'preserves the other headers' do
@@ -1842,7 +1847,6 @@ describe Elasticsearch::Transport::Client do
  end
 
  context 'when using Manticore as the transport', if: jruby? do
-
  let(:client) do
  described_class.new(hosts: ELASTICSEARCH_HOSTS,
  compression: true,
@@ -1856,9 +1860,7 @@ describe Elasticsearch::Transport::Client do
  end
 
  describe '#perform_request' do
-
  context 'when a request is made' do
-
  before do
  client.perform_request('DELETE', '_all')
  client.perform_request('DELETE', 'myindex') rescue
@@ -1881,7 +1883,6 @@ describe Elasticsearch::Transport::Client do
  end
 
  context 'when an invalid url is specified' do
-
  it 'raises an exception' do
  expect {
  client.perform_request('GET', 'myindex/mydoc/1?routing=FOOBARBAZ')
@@ -1890,7 +1891,6 @@ describe Elasticsearch::Transport::Client do
  end
 
  context 'when the \'ignore\' parameter is specified' do
-
  let(:response) do
  client.perform_request('PUT', '_foobar', ignore: 400)
  end
@@ -1906,7 +1906,6 @@ describe Elasticsearch::Transport::Client do
  end
 
  context 'when request headers are specified' do
-
  let(:response) do
  client.perform_request('GET', '/', {}, nil, { 'Content-Type' => 'application/yaml' })
  end
@@ -1917,9 +1916,7 @@ describe Elasticsearch::Transport::Client do
  end
 
  describe 'selector' do
-
  context 'when the round-robin selector is used' do
-
  let(:nodes) do
  3.times.collect do
  client.perform_request('GET', '_nodes/_local').body['nodes'].to_a[0][1]['name']
@@ -1927,7 +1924,7 @@ describe Elasticsearch::Transport::Client do
  end
  end
  let(:node_names) do
- client.nodes.stats['nodes'].collect do |name, stats|
+ client.perform_request('GET', '_nodes/stats').body['nodes'].collect do |name, stats|
  stats['name']
  end
  end
@@ -1946,7 +1943,6 @@ describe Elasticsearch::Transport::Client do
  end
 
  context 'when patron is used as an adapter', unless: jruby? do
-
  before do
  require 'patron'
  end
@@ -2001,4 +1997,76 @@ describe Elasticsearch::Transport::Client do
  end
  end
  end
+
+ context 'CA Fingerprinting' do
+ context 'when setting a ca_fingerprint' do
+ after do
+ File.delete('./certificate.crt')
+ File.delete('./certificate.key')
+ end
+
+ let(:certificate) do
+ system(
+ 'openssl req -new -newkey rsa:4096 -days 3650 -nodes -x509 -subj "/C=BE/O=Test/CN=Test"' \
+ ' -keyout certificate.key -out certificate.crt',
+ err: File::NULL
+ )
+ OpenSSL::X509::Certificate.new File.read('./certificate.crt')
+ end
+
+ let(:client) do
+ Elasticsearch::Transport::Client.new(
+ host: 'https://elastic:changeme@localhost:9200',
+ ca_fingerprint: OpenSSL::Digest::SHA256.hexdigest(certificate.to_der)
+ )
+ end
+
+ it 'validates CA fingerprints on perform request' do
+ expect(client.transport.connections.connections.map(&:verified).uniq).to eq [false]
+ allow(client.transport).to receive(:perform_request) { 'Hello' }
+
+ server = double('server').as_null_object
+ allow(TCPSocket).to receive(:new) { server }
+ socket = double('socket')
+ allow(OpenSSL::SSL::SSLSocket).to receive(:new) { socket }
+ allow(socket).to receive(:connect) { nil }
+ allow(socket).to receive(:peer_cert_chain) { [certificate] }
+
+ response = client.perform_request('GET', '/')
+ expect(client.transport.connections.connections.map(&:verified).uniq).to eq [true]
+ expect(response).to eq 'Hello'
+ end
+ end
+
+ context 'when using an http host' do
+ let(:client) do
+ Elasticsearch::Transport::Client.new(
+ host: 'http://elastic:changeme@localhost:9200',
+ ca_fingerprint: 'test'
+ )
+ end
+
+ it 'raises an error' do
+ expect do
+ client.perform_request('GET', '/')
+ end.to raise_exception(Elasticsearch::Transport::Transport::Error)
+ end
+ end
+
+ context 'when not setting a ca_fingerprint' do
+ let(:client) do
+ Elasticsearch::Transport::Client.new(
+ host: 'http://elastic:changeme@localhost:9200'
+ )
+ end
+
+ it 'has unvalidated connections' do
+ allow(client).to receive(:validate_ca_fingerprints) { nil }
+ allow(client.transport).to receive(:perform_request) { nil }
+
+ client.perform_request('GET', '/')
+ expect(client).to_not have_received(:validate_ca_fingerprints)
+ end
+ end
+ end
  end
@@ -0,0 +1,126 @@
+ # Licensed to Elasticsearch B.V. under one or more contributor
+ # license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright
+ # ownership. Elasticsearch B.V. licenses this file to you under
+ # the Apache License, Version 2.0 (the "License"); you may
+ # not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ unless defined?(JRUBY_VERSION)
+ require_relative '../../../spec_helper'
+
+ describe Elasticsearch::Transport::Transport::HTTP::Curb do
+ let(:client) do
+ Elasticsearch::Transport::Client.new(transport_class: described_class)
+ end
+
+ describe '#perform_request' do
+ subject(:perform_request) { client.perform_request(*args) }
+ let(:args) do
+ ['POST', '/', {}, body, headers]
+ end
+ let(:body) { '{"foo":"bar"}' }
+ let(:headers) { { 'Content-Type' => 'application/x-ndjson' } }
+
+ before do
+ allow_any_instance_of(Curl::Easy).to receive(:http).and_return(true)
+ end
+
+ it 'convert body to json' do
+ expect(client.transport).to receive(:__convert_to_json).with(body)
+ perform_request
+ end
+
+ it 'call compress_request' do
+ expect(client.transport).to receive(:compress_request).with(body, headers)
+ perform_request
+ end
+
+ it 'return response' do
+ expect(perform_request).to be_kind_of(Elasticsearch::Transport::Transport::Response)
+ end
+
+ it 'put body' do
+ expect(client.transport.connections.first.connection).to receive('put_data=').with(body)
+ perform_request
+ end
+
+ context 'when body nil' do
+ let(:body) { nil }
+
+ it 'convert body to json' do
+ expect(client.transport).not_to receive(:__convert_to_json)
+ perform_request
+ end
+
+ it 'call compress_request' do
+ expect(client.transport).to receive(:compress_request).with(body, headers)
+ perform_request
+ end
+
+ it 'put body' do
+ expect(client.transport.connections.first.connection).not_to receive('put_data=')
+ perform_request
+ end
+ end
+
+ context 'when body is hash' do
+ let(:body) { { foo: 'bar' } }
+ let(:body_string) { '{"foo":"bar"}' }
+
+ it 'convert body to json' do
+ expect(client.transport).to receive(:__convert_to_json).with(body)
+ perform_request
+ end
+
+ it 'call compress_request' do
+ expect(client.transport).to receive(:compress_request).with(body_string, headers)
+ perform_request
+ end
+
+ it 'put body' do
+ expect(client.transport.connections.first.connection).to receive('put_data=').with(body_string)
+ perform_request
+ end
+ end
+
+ context 'when compression enabled' do
+ let(:client) do
+ Elasticsearch::Transport::Client.new(transport_class: described_class, compression: true)
+ end
+ let(:body_string) { '{"foo":"bar"}' }
+ let(:compressed_body) do
+ gzip = Zlib::GzipWriter.new(StringIO.new)
+ gzip << body_string
+ gzip.close.string
+ end
+
+ before { allow(client.transport).to receive(:decompress_response).and_return('') }
+
+ it 'put compressed body' do
+ expect(client.transport.connections.first.connection).to receive('put_data=').with(compressed_body)
+ perform_request
+ end
+
+ it 'set Content-Encoding header' do
+ perform_request
+ expect(client.transport.connections.first.connection.headers).to include('Content-Encoding')
+ end
+
+ it 'set Content-Encoding to gzip' do
+ perform_request
+ expect(client.transport.connections.first.connection.headers['Content-Encoding']).to eql('gzip')
+ end
+ end
+ end
+ end