elasticsearch-transport 7.13.3 → 7.16.0

This diff compares the contents of two publicly released versions of the package as published to their public registry. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in the registry.
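As the first hunk below shows, this release drops the Elasticsearch::Client convenience wrapper from the gem, and the specs now exercise the client through perform_request instead of API helper calls such as client.cluster.stats or client.search. For orientation only (not part of the package), a minimal usage sketch of the transport client on its own, assuming a node reachable at localhost:9200:

    require 'elasticsearch/transport'

    # The Elasticsearch::Client wrapper is no longer defined by this gem,
    # so instantiate the transport client directly.
    client = Elasticsearch::Transport::Client.new(host: 'http://localhost:9200')

    # Issue a raw request instead of calling an API helper method.
    response = client.perform_request('GET', '_cluster/health')
    puts response.status
    puts response.body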
@@ -15,35 +15,24 @@
  # specific language governing permissions and limitations
  # under the License.

- require "uri"
- require "time"
- require "timeout"
- require "multi_json"
- require "faraday"
+ require 'uri'
+ require 'time'
+ require 'timeout'
+ require 'zlib'
+ require 'multi_json'
+ require 'faraday'

- require "elasticsearch/transport/transport/loggable"
- require "elasticsearch/transport/transport/serializer/multi_json"
- require "elasticsearch/transport/transport/sniffer"
- require "elasticsearch/transport/transport/response"
- require "elasticsearch/transport/transport/errors"
- require "elasticsearch/transport/transport/base"
- require "elasticsearch/transport/transport/connections/selector"
- require "elasticsearch/transport/transport/connections/connection"
- require "elasticsearch/transport/transport/connections/collection"
- require "elasticsearch/transport/transport/http/faraday"
- require "elasticsearch/transport/client"
- require "elasticsearch/transport/redacted"
+ require 'elasticsearch/transport/transport/loggable'
+ require 'elasticsearch/transport/transport/serializer/multi_json'
+ require 'elasticsearch/transport/transport/sniffer'
+ require 'elasticsearch/transport/transport/response'
+ require 'elasticsearch/transport/transport/errors'
+ require 'elasticsearch/transport/transport/base'
+ require 'elasticsearch/transport/transport/connections/selector'
+ require 'elasticsearch/transport/transport/connections/connection'
+ require 'elasticsearch/transport/transport/connections/collection'
+ require 'elasticsearch/transport/transport/http/faraday'
+ require 'elasticsearch/transport/client'
+ require 'elasticsearch/transport/redacted'

- require "elasticsearch/transport/version"
-
- module Elasticsearch
- module Client
-
- # A convenience wrapper for {::Elasticsearch::Transport::Client#initialize}.
- #
- def new(arguments={}, &block)
- Elasticsearch::Transport::Client.new(arguments, &block)
- end
- extend self
- end
- end
+ require 'elasticsearch/transport/version'
@@ -32,14 +32,14 @@ describe Elasticsearch::Transport::Transport::Base do
  expect(logger).not_to receive(:error).with(/secret_password/)

  expect {
- client.cluster.stats
+ client.perform_request('GET', '_cluster/stats')
  }.to raise_exception(Faraday::ConnectionFailed)
  end

  it 'replaces the password with the string \'REDACTED\'' do
  expect(logger).to receive(:error).with(/REDACTED/)
  expect {
- client.cluster.stats
+ client.perform_request('GET', '_cluster/stats')
  }.to raise_exception(Faraday::ConnectionFailed)
  end
  end
@@ -65,7 +65,27 @@ describe Elasticsearch::Transport::Transport::Base do
  }
  end

- it_behaves_like 'a redacted string'
+ if jruby?
+ let(:client) { Elasticsearch::Transport::Client.new(arguments) }
+ let(:logger) { double('logger', fatal?: true, fatal: '') }
+
+ it 'does not include the password in the logged string' do
+ expect(logger).not_to receive(:fatal).with(/secret_password/)
+
+ expect {
+ client.perform_request('GET', '_cluster/stats')
+ }.to raise_exception(Faraday::SSLError)
+ end
+
+ it 'replaces the password with the string \'REDACTED\'' do
+ expect(logger).to receive(:fatal).with(/REDACTED/)
+ expect {
+ client.perform_request('GET', '_cluster/stats')
+ }.to raise_exception(Faraday::SSLError)
+ end
+ else
+ it_behaves_like 'a redacted string'
+ end
  end

  context 'when the user and password are provided in the URI object' do
@@ -75,8 +95,27 @@ describe Elasticsearch::Transport::Transport::Base do
  logger: logger
  }
  end
-
- it_behaves_like 'a redacted string'
+ if jruby?
+ let(:client) { Elasticsearch::Transport::Client.new(arguments) }
+ let(:logger) { double('logger', fatal?: true, fatal: '') }
+
+ it 'does not include the password in the logged string' do
+ expect(logger).not_to receive(:fatal).with(/secret_password/)
+
+ expect {
+ client.perform_request('GET', '_cluster/stats')
+ }.to raise_exception(Faraday::SSLError)
+ end
+
+ it 'replaces the password with the string \'REDACTED\'' do
+ expect(logger).to receive(:fatal).with(/REDACTED/)
+ expect {
+ client.perform_request('GET', '_cluster/stats')
+ }.to raise_exception(Faraday::SSLError)
+ end
+ else
+ it_behaves_like 'a redacted string'
+ end
  end
  end

@@ -94,7 +133,7 @@ describe Elasticsearch::Transport::Transport::Base do
  end

  it 'raises an exception' do
- expect { client.info }.to raise_exception(Faraday::ConnectionFailed)
+ expect { client.perform_request('GET', '/') }.to raise_exception(Faraday::ConnectionFailed)
  end
  end

@@ -129,7 +168,7 @@ describe Elasticsearch::Transport::Transport::Base do

  let(:arguments) do
  {
- hosts: ['http://localhost:9250'],
+ hosts: ELASTICSEARCH_HOSTS,
  retry_on_status: ['404']
  }
  end
@@ -24,10 +24,6 @@ describe Elasticsearch::Transport::Client do
  end
  end

- it 'is aliased as Elasticsearch::Client' do
- expect(Elasticsearch::Client.new).to be_a(described_class)
- end
-
  it 'has a default transport' do
  expect(client.transport).to be_a(Elasticsearch::Transport::Client::DEFAULT_TRANSPORT_CLASS)
  end
@@ -231,13 +227,14 @@ describe Elasticsearch::Transport::Client do

  describe 'adapter' do
  context 'when no adapter is specified' do
- let(:adapter) do
- client.transport.connections.all.first.connection.builder.adapter
- end
+ fork do
+ let(:client) { described_class.new }
+ let(:adapter) { client.transport.connections.all.first.connection.builder.adapter }

- it 'uses Faraday NetHttp' do
- expect(adapter).to eq Faraday::Adapter::NetHttp
- end
+ it 'uses Faraday NetHttp' do
+ expect(adapter).to eq Faraday::Adapter::NetHttp
+ end
+ end unless jruby?
  end

  context 'when the adapter is patron' do
@@ -1416,14 +1413,14 @@ describe Elasticsearch::Transport::Client do
  let(:client) { described_class.new(host: hosts) }

  it 'doesnae raise an ArgumentError' do
- expect { client.search(opaque_id: 'no_error') }.not_to raise_error
+ expect { client.perform_request('GET', '_search', opaque_id: 'no_error') }.not_to raise_error
  end

  it 'uses X-Opaque-Id in the header' do
  allow(client).to receive(:perform_request) { OpenStruct.new(body: '') }
- expect { client.search(opaque_id: 'opaque_id') }.not_to raise_error
+ expect { client.perform_request('GET', '_search', {}, nil, opaque_id: 'opaque_id') }.not_to raise_error
  expect(client).to have_received(:perform_request)
- .with('GET', '_search', { opaque_id: 'opaque_id' }, nil, {})
+ .with('GET', '_search', {}, nil, { opaque_id: 'opaque_id' })
  end
  end
  end
@@ -1469,28 +1466,19 @@ describe Elasticsearch::Transport::Client do
  end

  context 'when Elasticsearch response includes a warning header' do
+ let(:logger) { double('logger', warn: '', warn?: '', info?: '', info: '', debug?: '', debug: '') }
  let(:client) do
- Elasticsearch::Transport::Client.new(hosts: hosts)
+ Elasticsearch::Transport::Client.new(hosts: hosts, logger: logger)
  end

  let(:warning) { 'Elasticsearch warning: "deprecation warning"' }

  it 'prints a warning' do
- allow_any_instance_of(Elasticsearch::Transport::Transport::Response).to receive(:headers) do
- { 'warning' => warning }
- end
-
- begin
- stderr = $stderr
- fake_stderr = StringIO.new
- $stderr = fake_stderr
-
- client.perform_request('GET', '/')
- fake_stderr.rewind
- expect(fake_stderr.string).to eq("warning: #{warning}\n")
- ensure
- $stderr = stderr
+ expect_any_instance_of(Faraday::Connection).to receive(:run_request) do
+ Elasticsearch::Transport::Transport::Response.new(200, {}, { 'warning' => warning })
  end
+ client.perform_request('GET', '/')
+ expect(logger).to have_received(:warn).with(warning)
  end
  end

@@ -1500,7 +1488,7 @@ describe Elasticsearch::Transport::Client do

  it 'performs the request with the header' do
  allow(client).to receive(:perform_request) { OpenStruct.new(body: '') }
- expect { client.search(headers: headers) }.not_to raise_error
+ expect { client.perform_request('GET', '_search', {}, nil, headers) }.not_to raise_error
  expect(client).to have_received(:perform_request)
  .with('GET', '_search', {}, nil, headers)
  end
@@ -1514,7 +1502,7 @@ describe Elasticsearch::Transport::Client do
  )
  end
  let(:instance_headers) { { set_in_instantiation: 'header value' } }
- let(:param_headers) {{'user-agent' => 'My Ruby Tests', 'set-on-method-call' => 'header value'}}
+ let(:param_headers) { {'user-agent' => 'My Ruby Tests', 'set-on-method-call' => 'header value'} }

  it 'performs the request with the header' do
  expected_headers = client.transport.connections.connections.first.connection.headers.merge(param_headers)
@@ -1523,7 +1511,7 @@ describe Elasticsearch::Transport::Client do
  .to receive(:run_request)
  .with(:get, "http://#{hosts[0]}/_search", nil, expected_headers) { OpenStruct.new(body: '')}

- client.search(headers: param_headers)
+ client.perform_request('GET', '_search', {}, nil, param_headers)
  end
  end
  end
@@ -1560,7 +1548,6 @@ describe Elasticsearch::Transport::Client do
  end

  context 'when a request is made' do
-
  let!(:response) do
  client.perform_request('GET', '_cluster/health')
  end
@@ -1571,9 +1558,7 @@ describe Elasticsearch::Transport::Client do
  end

  describe '#initialize' do
-
  context 'when options are specified' do
-
  let(:transport_options) do
  { headers: { accept: 'application/yaml', content_type: 'application/yaml' } }
  end
@@ -1589,9 +1574,8 @@ describe Elasticsearch::Transport::Client do
  end

  context 'when a block is provided' do
-
  let(:client) do
- Elasticsearch::Client.new(host: ELASTICSEARCH_HOSTS.first, logger: logger) do |client|
+ described_class.new(host: ELASTICSEARCH_HOSTS.first, logger: logger) do |client|
  client.headers['Accept'] = 'application/yaml'
  end
  end
@@ -1607,7 +1591,7 @@ describe Elasticsearch::Transport::Client do

  context 'when the Faraday adapter is set in the block' do
  let(:client) do
- Elasticsearch::Client.new(host: ELASTICSEARCH_HOSTS.first, logger: logger) do |client|
+ described_class.new(host: ELASTICSEARCH_HOSTS.first, logger: logger) do |client|
  client.adapter(:net_http_persistent)
  end
  end
@@ -1675,6 +1659,29 @@ describe Elasticsearch::Transport::Client do
  end
  end

+ context 'when retry_on_failure is true and delay_on_retry is specified' do
+ context 'when a node is unreachable' do
+ let(:hosts) do
+ [ELASTICSEARCH_HOSTS.first, "foobar1", "foobar2"]
+ end
+
+ let(:options) do
+ { retry_on_failure: true, delay_on_retry: 3000 }
+ end
+
+ let(:responses) do
+ 5.times.collect do
+ client.perform_request('GET', '_nodes/_local')
+ end
+ end
+
+ it 'retries on failure' do
+ allow_any_instance_of(Object).to receive(:sleep).with(3000 / 1000)
+ expect(responses.all? { true }).to be(true)
+ end
+ end
+ end
+
  context 'when reload_on_failure is true' do

  let(:hosts) do
@@ -1734,7 +1741,7 @@ describe Elasticsearch::Transport::Client do
  end

  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end

  it 'preserves the other headers' do
@@ -1753,7 +1760,7 @@ describe Elasticsearch::Transport::Client do
  end

  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end

  it 'preserves the other headers' do
@@ -1772,7 +1779,7 @@ describe Elasticsearch::Transport::Client do
  end

  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end

  it 'preserves the other headers' do
@@ -1791,7 +1798,7 @@ describe Elasticsearch::Transport::Client do
  end

  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end

  it 'preserves the other headers' do
@@ -1810,7 +1817,7 @@ describe Elasticsearch::Transport::Client do
  end

  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end

  it 'preserves the other headers' do
@@ -1821,11 +1828,12 @@ describe Elasticsearch::Transport::Client do
  end

  context 'when using Curb as the transport', unless: jruby? do
-
  let(:client) do
- described_class.new(hosts: ELASTICSEARCH_HOSTS,
- compression: true,
- transport_class: Elasticsearch::Transport::Transport::HTTP::Curb)
+ described_class.new(
+ hosts: ELASTICSEARCH_HOSTS,
+ compression: true,
+ transport_class: Elasticsearch::Transport::Transport::HTTP::Curb
+ )
  end

  it 'compresses the request and decompresses the response' do
@@ -1833,7 +1841,7 @@ describe Elasticsearch::Transport::Client do
  end

  it 'sets the Accept-Encoding header' do
- expect(client.transport.connections[0].connection.headers['Accept-Encoding'])
+ expect(client.transport.connections[0].connection.headers['Accept-Encoding']).to eq 'gzip'
  end

  it 'preserves the other headers' do
@@ -1842,7 +1850,6 @@ describe Elasticsearch::Transport::Client do
  end

  context 'when using Manticore as the transport', if: jruby? do
-
  let(:client) do
  described_class.new(hosts: ELASTICSEARCH_HOSTS,
  compression: true,
@@ -1856,9 +1863,7 @@ describe Elasticsearch::Transport::Client do
  end

  describe '#perform_request' do
-
  context 'when a request is made' do
-
  before do
  client.perform_request('DELETE', '_all')
  client.perform_request('DELETE', 'myindex') rescue
@@ -1881,7 +1886,6 @@ describe Elasticsearch::Transport::Client do
  end

  context 'when an invalid url is specified' do
-
  it 'raises an exception' do
  expect {
  client.perform_request('GET', 'myindex/mydoc/1?routing=FOOBARBAZ')
@@ -1890,7 +1894,6 @@ describe Elasticsearch::Transport::Client do
  end

  context 'when the \'ignore\' parameter is specified' do
-
  let(:response) do
  client.perform_request('PUT', '_foobar', ignore: 400)
  end
@@ -1906,7 +1909,6 @@ describe Elasticsearch::Transport::Client do
  end

  context 'when request headers are specified' do
-
  let(:response) do
  client.perform_request('GET', '/', {}, nil, { 'Content-Type' => 'application/yaml' })
  end
@@ -1917,9 +1919,7 @@ describe Elasticsearch::Transport::Client do
  end

  describe 'selector' do
-
  context 'when the round-robin selector is used' do
-
  let(:nodes) do
  3.times.collect do
  client.perform_request('GET', '_nodes/_local').body['nodes'].to_a[0][1]['name']
@@ -1927,7 +1927,7 @@ describe Elasticsearch::Transport::Client do
  end

  let(:node_names) do
- client.nodes.stats['nodes'].collect do |name, stats|
+ client.perform_request('GET', '_nodes/stats').body('nodes').collect do |name, stats|
  stats['name']
  end
  end
@@ -1946,7 +1946,6 @@ describe Elasticsearch::Transport::Client do
  end

  context 'when patron is used as an adapter', unless: jruby? do
-
  before do
  require 'patron'
  end
@@ -2001,4 +2000,76 @@ describe Elasticsearch::Transport::Client do
  end
  end
  end
+
+ context 'CA Fingerprinting' do
+ context 'when setting a ca_fingerprint' do
+ after do
+ File.delete('./certificate.crt')
+ File.delete('./certificate.key')
+ end
+
+ let(:certificate) do
+ system(
+ 'openssl req -new -newkey rsa:4096 -days 3650 -nodes -x509 -subj "/C=BE/O=Test/CN=Test"' \
+ ' -keyout certificate.key -out certificate.crt',
+ err: File::NULL
+ )
+ OpenSSL::X509::Certificate.new File.read('./certificate.crt')
+ end
+
+ let(:client) do
+ Elasticsearch::Transport::Client.new(
+ host: 'https://elastic:changeme@localhost:9200',
+ ca_fingerprint: OpenSSL::Digest::SHA256.hexdigest(certificate.to_der)
+ )
+ end
+
+ it 'validates CA fingerprints on perform request' do
+ expect(client.transport.connections.connections.map(&:verified).uniq).to eq [false]
+ allow(client.transport).to receive(:perform_request) { 'Hello' }
+
+ server = double('server').as_null_object
+ allow(TCPSocket).to receive(:new) { server }
+ socket = double('socket')
+ allow(OpenSSL::SSL::SSLSocket).to receive(:new) { socket }
+ allow(socket).to receive(:connect) { nil }
+ allow(socket).to receive(:peer_cert_chain) { [certificate] }
+
+ response = client.perform_request('GET', '/')
+ expect(client.transport.connections.connections.map(&:verified).uniq).to eq [true]
+ expect(response).to eq 'Hello'
+ end
+ end
+
+ context 'when using an http host' do
+ let(:client) do
+ Elasticsearch::Transport::Client.new(
+ host: 'http://elastic:changeme@localhost:9200',
+ ca_fingerprint: 'test'
+ )
+ end
+
+ it 'raises an error' do
+ expect do
+ client.perform_request('GET', '/')
+ end.to raise_exception(Elasticsearch::Transport::Transport::Error)
+ end
+ end
+
+ context 'when not setting a ca_fingerprint' do
+ let(:client) do
+ Elasticsearch::Transport::Client.new(
+ host: 'http://elastic:changeme@localhost:9200'
+ )
+ end
+
+ it 'has unvalidated connections' do
+ allow(client).to receive(:validate_ca_fingerprints) { nil }
+ allow(client.transport).to receive(:perform_request) { nil }
+
+ client.perform_request('GET', '/')
+ expect(client).to_not have_received(:validate_ca_fingerprints)
+ end
+ end
+ end
  end
@@ -0,0 +1,126 @@
+ # Licensed to Elasticsearch B.V. under one or more contributor
+ # license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright
+ # ownership. Elasticsearch B.V. licenses this file to you under
+ # the Apache License, Version 2.0 (the "License"); you may
+ # not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ unless defined?(JRUBY_VERSION)
+ require_relative '../../../spec_helper'
+
+ describe Elasticsearch::Transport::Transport::HTTP::Curb do
+ let(:client) do
+ Elasticsearch::Transport::Client.new(transport_class: described_class)
+ end
+
+ describe '#perform_request' do
+ subject(:perform_request) { client.perform_request(*args) }
+ let(:args) do
+ ['POST', '/', {}, body, headers]
+ end
+ let(:body) { '{"foo":"bar"}' }
+ let(:headers) { { 'Content-Type' => 'application/x-ndjson' } }
+
+ before do
+ allow_any_instance_of(Curl::Easy).to receive(:http).and_return(true)
+ end
+
+ it 'convert body to json' do
+ expect(client.transport).to receive(:__convert_to_json).with(body)
+ perform_request
+ end
+
+ it 'call compress_request' do
+ expect(client.transport).to receive(:compress_request).with(body, headers)
+ perform_request
+ end
+
+ it 'return response' do
+ expect(perform_request).to be_kind_of(Elasticsearch::Transport::Transport::Response)
+ end
+
+ it 'put body' do
+ expect(client.transport.connections.first.connection).to receive('put_data=').with(body)
+ perform_request
+ end
+
+ context 'when body nil' do
+ let(:body) { nil }
+
+ it 'convert body to json' do
+ expect(client.transport).not_to receive(:__convert_to_json)
+ perform_request
+ end
+
+ it 'call compress_request' do
+ expect(client.transport).to receive(:compress_request).with(body, headers)
+ perform_request
+ end
+
+ it 'put body' do
+ expect(client.transport.connections.first.connection).not_to receive('put_data=')
+ perform_request
+ end
+ end
+
+ context 'when body is hash' do
+ let(:body) { { foo: 'bar' } }
+ let(:body_string) { '{"foo":"bar"}' }
+
+ it 'convert body to json' do
+ expect(client.transport).to receive(:__convert_to_json).with(body)
+ perform_request
+ end
+
+ it 'call compress_request' do
+ expect(client.transport).to receive(:compress_request).with(body_string, headers)
+ perform_request
+ end
+
+ it 'put body' do
+ expect(client.transport.connections.first.connection).to receive('put_data=').with(body_string)
+ perform_request
+ end
+ end
+
+ context 'when compression enabled' do
+ let(:client) do
+ Elasticsearch::Transport::Client.new(transport_class: described_class, compression: true)
+ end
+ let(:body_string) { '{"foo":"bar"}' }
+ let(:compressed_body) do
+ gzip = Zlib::GzipWriter.new(StringIO.new)
+ gzip << body_string
+ gzip.close.string
+ end
+
+ before { allow(client.transport).to receive(:decompress_response).and_return('') }
+
+ it 'put compressed body' do
+ expect(client.transport.connections.first.connection).to receive('put_data=').with(compressed_body)
+ perform_request
+ end
+
+ it 'set Content-Encoding header' do
+ perform_request
+ expect(client.transport.connections.first.connection.headers).to include('Content-Encoding')
+ end
+
+ it 'set Content-Encoding to gzip' do
+ perform_request
+ expect(client.transport.connections.first.connection.headers['Content-Encoding']).to eql('gzip')
+ end
+ end
+ end
+ end
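For reference, the compressed_body helper in the new Curb spec above gzips the request body with nothing but the Ruby standard library (hence the new require 'zlib' in the first hunk). A standalone sketch of that round trip, independent of the package, illustrating the same Zlib calls plus a decompression step for verification:

    require 'zlib'
    require 'stringio'

    body = '{"foo":"bar"}'

    # Gzip the body the same way the spec's compressed_body helper does.
    gzip = Zlib::GzipWriter.new(StringIO.new)
    gzip << body
    compressed = gzip.close.string

    # Read it back to confirm the round trip.
    restored = Zlib::GzipReader.new(StringIO.new(compressed)).read
    raise 'gzip round-trip mismatch' unless restored == body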