elasticsearch-extensions 0.0.22 → 0.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Gemfile +3 -7
- data/README.md +4 -4
- data/elasticsearch-extensions.gemspec +2 -2
- data/lib/elasticsearch/extensions/reindex.rb +25 -16
- data/lib/elasticsearch/extensions/test/cluster.rb +49 -20
- data/lib/elasticsearch/extensions/version.rb +1 -1
- data/test/reindex/integration/reindex_test.rb +42 -3
- data/test/reindex/unit/reindex_test.rb +15 -15
- data/test/test/cluster/integration/cluster_test.rb +5 -0
- data/test/test/cluster/unit/cluster_test.rb +33 -6
- metadata +2 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 929b42632d0c89e1007c16e6d08d465df863ad58
+  data.tar.gz: 8ebc114b63d1cf9d64346a7da06d4fc385a401f6
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 053e210e305b080280e75ef955a5a6b7981cadc604cab50250308ce6a335ea0ef06e5eef655e11789b78a501604bc7399a4b747e4db6ab292812c209a7cf014e
+  data.tar.gz: c19de28219676b133fe3850cadadd42087c9ebc3da86da055563e3e2be68c5bfa1e4d7c4aff79b68cdc629acc7dbf8674b751d4f97336558f8dff36f81deecdd
data/Gemfile
CHANGED
@@ -3,18 +3,14 @@ source 'https://rubygems.org'
 # Specify your gem's dependencies in elasticsearch-extensions.gemspec
 gemspec

-if File.exists? File.expand_path("../../elasticsearch-api/elasticsearch-api.gemspec", __FILE__)
+if File.exist? File.expand_path("../../elasticsearch-api/elasticsearch-api.gemspec", __FILE__)
   gem 'elasticsearch-api', :path => File.expand_path("../../elasticsearch-api", __FILE__), :require => false
 end

-if File.exists? File.expand_path("../../elasticsearch-transport/elasticsearch-transport.gemspec", __FILE__)
+if File.exist? File.expand_path("../../elasticsearch-transport/elasticsearch-transport.gemspec", __FILE__)
   gem 'elasticsearch-transport', :path => File.expand_path("../../elasticsearch-transport", __FILE__), :require => false
 end

-if File.exists? File.expand_path("../../elasticsearch-extensions/elasticsearch-extensions.gemspec", __FILE__)
-  gem 'elasticsearch-extensions', :path => File.expand_path("../../elasticsearch-extensions", __FILE__), :require => false
-end
-
-if File.exists? File.expand_path("../../elasticsearch/elasticsearch.gemspec", __FILE__)
+if File.exist? File.expand_path("../../elasticsearch/elasticsearch.gemspec", __FILE__)
   gem 'elasticsearch', :path => File.expand_path("../../elasticsearch/", __FILE__)
 end
data/README.md
CHANGED
@@ -136,10 +136,10 @@ Start the cluster on specific port, with a specific Elasticsearch version, numbe
     require 'elasticsearch/extensions/test/cluster'

     Elasticsearch::Extensions::Test::Cluster.start \
-      cluster_name:
-      command:
-      port:
-
+      cluster_name: "my-testing-cluster",
+      command:      "/usr/local/Cellar/elasticsearch/0.90.10/bin/elasticsearch",
+      port:         9350,
+      number_of_nodes: 3

     # Starting 3 Elasticsearch nodes.....................
     # --------------------------------------------------------------------------------
data/elasticsearch-extensions.gemspec
CHANGED
@@ -20,7 +20,7 @@ Gem::Specification.new do |s|

   s.add_dependency "ansi"

-  unless File.exists? File.expand_path("../../elasticsearch/elasticsearch.gemspec", __FILE__)
+  unless File.exist? File.expand_path("../../elasticsearch/elasticsearch.gemspec", __FILE__)
     s.add_dependency "elasticsearch"
   end

@@ -47,7 +47,7 @@ Gem::Specification.new do |s|
   s.add_development_dependency "ci_reporter", "~> 1.9"

   if defined?(RUBY_VERSION) && RUBY_VERSION < '1.9'
-    s.add_development_dependency "json"
+    s.add_development_dependency "json", '~> 1.8'
   end

   if defined?(RUBY_VERSION) && RUBY_VERSION > '1.9'
data/lib/elasticsearch/extensions/reindex.rb
CHANGED
@@ -125,28 +125,21 @@ module Elasticsearch
         response = arguments[:source][:client].search(
           index: arguments[:source][:index],
           scroll: arguments[:scroll],
-          size: arguments[:batch_size],
-          search_type: 'scan',
-          fields: ['_source', '_parent', '_routing', '_timestamp']
+          size: arguments[:batch_size]
         )

+        documents = response['hits']['hits']
+
+        unless documents.empty?
+          bulk_response = __store_batch(documents)
+          output[:errors] += bulk_response['items'].select { |k, v| k.values.first['error'] }.size
+        end
+
         while response = arguments[:source][:client].scroll(scroll_id: response['_scroll_id'], scroll: arguments[:scroll]) do
           documents = response['hits']['hits']
           break if documents.empty?

-          bulk = documents.map do |doc|
-            doc['_index'] = arguments[:target][:index]
-
-            arguments[:transform].call(doc) if arguments[:transform]
-
-            doc['data'] = doc['_source']
-            doc.delete('_score')
-            doc.delete('_source')
-
-            { index: doc }
-          end
-
-          bulk_response = arguments[:target][:client].bulk body: bulk
+          bulk_response = __store_batch(documents)
           output[:errors] += bulk_response['items'].select { |k, v| k.values.first['error'] }.size
         end

@@ -154,6 +147,22 @@ module Elasticsearch

         output
       end
+
+      def __store_batch(documents)
+        body = documents.map do |doc|
+          doc['_index'] = arguments[:target][:index]
+
+          arguments[:transform].call(doc) if arguments[:transform]
+
+          doc['data'] = doc['_source']
+          doc.delete('_score')
+          doc.delete('_source')
+
+          { index: doc }
+        end
+
+        arguments[:target][:client].bulk body: body
+      end
     end
   end
 end
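For orientation, a minimal sketch of how the refactored reindex helper is driven, mirroring the calls exercised in the integration tests further down; the client host, index names, and transform here are illustrative assumptions rather than part of this release:

    require 'elasticsearch'
    require 'elasticsearch/extensions/reindex'

    # Assumption: an Elasticsearch node reachable on localhost:9250 (the test cluster's default port).
    client = Elasticsearch::Client.new host: 'localhost:9250'

    # Copy documents from a hypothetical 'articles' index into 'articles-v2',
    # upcasing the title of each document on the way.
    reindex = Elasticsearch::Extensions::Reindex.new \
                source:     { index: 'articles', client: client },
                target:     { index: 'articles-v2' },
                batch_size: 100,
                transform:  lambda { |doc| doc['_source']['title'] = doc['_source']['title'].to_s.upcase; doc },
                refresh:    true

    result = reindex.perform
    puts "Reindexing finished with #{result[:errors]} errors"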
data/lib/elasticsearch/extensions/test/cluster.rb
CHANGED
@@ -161,17 +161,20 @@ module Elasticsearch
                 -E path.repo=/tmp \
                 -E repositories.url.allowed_urls=http://snapshot.test* \
                 -E discovery.zen.minimum_master_nodes=#{arguments[:number_of_nodes]-1} \
+                -E node.max_local_storage_nodes=#{arguments[:number_of_nodes]} \
                 -E logger.level=DEBUG \
                 #{arguments[:es_params]} \
                 > /dev/null
               COMMAND
          }
        }
+      COMMANDS['6.0'] = COMMANDS['5.0']
+      COMMANDS.freeze

      # Create a new instance of the Cluster class
      #
      # @option arguments [String] :cluster_name Cluster name (default: `elasticsearch_test`)
-      # @option arguments [Integer] :
+      # @option arguments [Integer] :number_of_nodes Number of desired nodes (default: 2)
      # @option arguments [String] :command Elasticsearch command (default: `elasticsearch`)
      # @option arguments [String] :port Starting port number; will be auto-incremented (default: 9250)
      # @option arguments [String] :node_name The node name (will be appended with a number)
@@ -188,7 +191,7 @@ module Elasticsearch
      # @see Cluster#start
      #
      def initialize(arguments={})
-        @arguments = arguments
+        @arguments = arguments.dup

        @arguments[:command] ||= ENV.fetch('TEST_CLUSTER_COMMAND', 'elasticsearch')
        @arguments[:port]    ||= ENV.fetch('TEST_CLUSTER_PORT', 9250).to_i
@@ -221,7 +224,7 @@ module Elasticsearch
      # @example Start a cluster with a custom configuration
      #     Elasticsearch::Extensions::Test::Cluster::Cluster.new(
      #       cluster_name: 'my-cluster',
-      #
+      #       number_of_nodes: 3,
      #       node_name: 'my-node',
      #       port: 9350
      #     ).start
@@ -240,7 +243,7 @@ module Elasticsearch
          return false
        end

-        __remove_cluster_data
+        __remove_cluster_data if @clear_cluster

        STDOUT.print "Starting ".ansi(:faint) + arguments[:number_of_nodes].to_s.ansi(:bold, :faint) +
                     " Elasticsearch nodes..".ansi(:faint)
@@ -256,6 +259,7 @@ module Elasticsearch
          pid = Process.spawn(command)
          Process.detach pid
          pids << pid
+          sleep 1
        end

        __check_for_running_processes(pids)
@@ -365,8 +369,8 @@ module Elasticsearch
          when /^0|^1/
            '0.0.0.0'
          when /^2/
-            '
-          when /^5/
+            '_local_'
+          when /^5|^6/
            '_local_'
          else
            raise RuntimeError, "Cannot determine default network host from version [#{version}]"
@@ -418,18 +422,34 @@ module Elasticsearch
            raise RuntimeError, "Cannot determine Elasticsearch version from jar [#{jar}]"
          end
        else
-          STDERR.puts "[!] Cannot find Elasticsearch .jar from path to command [#{arguments[:command]}], using
+          STDERR.puts "[!] Cannot find Elasticsearch .jar from path to command [#{arguments[:command]}], using `#{arguments[:command]} --version`" if ENV['DEBUG']
+
+          unless File.exist? arguments[:command]
+            raise Errno::ENOENT, "File [#{arguments[:command]}] does not exist -- did you pass a correct path to the Elasticsearch launch script"
+          end

          output = ''

          begin
            # First, try the new `--version` syntax...
            STDERR.puts "Running [#{arguments[:command]} --version] to determine version" if ENV['DEBUG']
-
+            rout, wout = IO.pipe
+            pid = Process.spawn("#{arguments[:command]} --version", out: wout)
+
+            Timeout::timeout(10) do
+              Process.wait(pid)
+              wout.close unless wout.closed?
+              output = rout.read unless rout.closed?
+              rout.close unless rout.closed?
+            end
          rescue Timeout::Error
-            # ...else, the
+            # ...else, the old `-v` syntax
            STDERR.puts "Running [#{arguments[:command]} -v] to determine version" if ENV['DEBUG']
            output = `#{arguments[:command]} -v`
+          ensure
+            Process.kill('INT', pid) if pid
+            wout.close unless wout.closed?
+            rout.close unless rout.closed?
          end

          STDERR.puts "> #{output}" if ENV['DEBUG']
@@ -454,6 +474,8 @@ module Elasticsearch
            '2.0'
          when /^5\..*/
            '5.0'
+          when /^6\..*/
+            '6.0'
          else
            raise RuntimeError, "Cannot determine major version from [#{version}]"
          end
@@ -485,17 +507,25 @@ module Elasticsearch
      # @return Boolean
      #
      def __wait_for_status(status='green', timeout=30)
-
-
-
+        begin
+          Timeout::timeout(timeout) do
+            loop do
+              response = __get_cluster_health(status)
+              STDERR.puts response if ENV['DEBUG']

-
-
-
+              if response && response['status'] == status && ( arguments[:number_of_nodes].nil? || arguments[:number_of_nodes].to_i == response['number_of_nodes'].to_i )
+                break
+              end

-
-
+              STDOUT.print '.'.ansi(:faint)
+              sleep 1
+            end
          end
+        rescue Timeout::Error => e
+          message = "\nTimeout while waiting for cluster status [#{status}]"
+          message += " and [#{arguments[:number_of_nodes]}] nodes" if arguments[:number_of_nodes]
+          STDOUT.puts message.ansi(:red, :bold)
+          raise e
        end

        return true
@@ -553,13 +583,12 @@ module Elasticsearch
        JSON.parse(response)
      end

-      # Remove the data directory
+      # Remove the data directory
      #
      # @api private
      #
      def __remove_cluster_data
-
-        FileUtils.rm_rf "#{arguments[:path_data]}/#{arguments[:cluster_name]}" if @clear_cluster
+        FileUtils.rm_rf arguments[:path_data]
      end

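For orientation, a minimal sketch of driving the test cluster with the options touched above (6.x command support, `number_of_nodes`, data-directory cleanup); the launch-script path is an illustrative assumption, and the stop call mirrors the integration test below:

    require 'elasticsearch/extensions/test/cluster'

    # Assumption: path to a locally installed Elasticsearch 5.x or 6.x launch script.
    elasticsearch_command = '/usr/local/elasticsearch-6.0.0/bin/elasticsearch'

    # Starts the nodes on the default port (9250) and waits for the cluster
    # to reach the desired health status and node count.
    Elasticsearch::Extensions::Test::Cluster.start \
      command:         elasticsearch_command,
      cluster_name:    'my-testing-cluster',
      number_of_nodes: 2

    # ... run the integration tests against localhost:9250 ...

    Elasticsearch::Extensions::Test::Cluster.stop command: elasticsearch_command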
data/test/reindex/integration/reindex_test.rb
CHANGED
@@ -25,13 +25,20 @@ class Elasticsearch::Extensions::ReindexIntegrationTest < Elasticsearch::Test::I
     @client.index index: 'test1', type: 'd', id: 3, body: { title: 'TEST 3', category: 'three' }
     @client.indices.refresh index: 'test1'

+    @client.indices.create index: 'test2'
+
     @client.cluster.health wait_for_status: 'yellow'
   end

+  teardown do
+    @client.indices.delete index: '_all'
+  end
+
   should "copy documents from one index to another" do
     reindex = Elasticsearch::Extensions::Reindex.new \
                 source: { index: 'test1', client: @client },
                 target: { index: 'test2' },
+                batch_size: 2,
                 refresh: true

     result = reindex.perform
@@ -40,6 +47,36 @@ class Elasticsearch::Extensions::ReindexIntegrationTest < Elasticsearch::Test::I
     assert_equal 3, @client.search(index: 'test2')['hits']['total']
   end

+  should "copy documents with parent/child relationship" do
+    mapping = { mappings: { p: {}, c: { _parent: { type: 'p' } } } }
+    @client.indices.create index: 'test_parent_1', body: mapping
+    @client.indices.create index: 'test_parent_2', body: mapping
+
+    @client.index index: 'test_parent_1', type: 'p', id: 1, body: { title: 'Parent 1' }
+    @client.index index: 'test_parent_1', type: 'p', id: 2, body: { title: 'Parent 2' }
+    @client.index index: 'test_parent_1', type: 'c', parent: 1, body: { title: 'Child One' }
+    @client.index index: 'test_parent_1', type: 'c', parent: 1, body: { title: 'Child Two' }
+
+    @client.indices.refresh index: 'test_parent_1'
+
+    reindex = Elasticsearch::Extensions::Reindex.new \
+                source: { index: 'test_parent_1', client: @client },
+                target: { index: 'test_parent_2' },
+                batch_size: 2,
+                refresh: true
+
+    result = reindex.perform
+
+    assert_equal 0, result[:errors]
+    assert_equal 4, @client.search(index: 'test_parent_2')['hits']['total']
+
+    response = @client.search index: 'test_parent_2', body: {
+      query: { has_child: { type: 'c', query: { match: { title: 'two' } } } } }
+
+    assert_equal 1, response['hits']['hits'].size
+    assert_equal 'Parent 1', response['hits']['hits'][0]['_source']['title']
+  end
+
   should "transform documents with a lambda" do
     reindex = Elasticsearch::Extensions::Reindex.new \
                 source: { index: 'test1', client: @client },
@@ -60,18 +97,20 @@ class Elasticsearch::Extensions::ReindexIntegrationTest < Elasticsearch::Test::I

     reindex = Elasticsearch::Extensions::Reindex.new \
                 source: { index: 'test1', client: @client },
-                target: { index: 'test3' },
-                refresh: true
+                target: { index: 'test3' }

     result = reindex.perform

+    @client.indices.refresh index: 'test3'
+
     assert_equal 3, result[:errors]
     assert_equal 0, @client.search(index: 'test3')['hits']['total']
   end

   should "reindex via the API integration" do
-    @client.
+    @client.indices.create index: 'test4'

+    @client.reindex source: { index: 'test1' }, target: { index: 'test4' }
     @client.indices.refresh index: 'test4'

     assert_equal 3, @client.search(index: 'test4')['hits']['total']
data/test/reindex/unit/reindex_test.rb
CHANGED
@@ -43,8 +43,8 @@ class Elasticsearch::Extensions::ReindexTest < Test::Unit::TestCase
                         '_id'  => d['_id'],
                         'data' => d['_source']
                       } }]
-    @bulk_response = {'errors'=>false, 'items' => [{'index' => {}}]}
-    @bulk_response_error = {'errors'=>true, 'items' => [{'index' => {}},{'index' => {'error' => 'FOOBAR'}}]}
+    @bulk_response = {'errors'=>false, 'items' => [{'index' => {}}, {'index' => {}}]}
+    @bulk_response_error = {'errors'=>true, 'items' => [{'index' => {}}, {'index' => {'error' => 'FOOBAR'}}]}
   end

   should "scroll through the index and save batches in bulk" do
@@ -52,11 +52,15 @@ class Elasticsearch::Extensions::ReindexTest < Test::Unit::TestCase
     subject = Elasticsearch::Extensions::Reindex.new source: { index: 'foo', client: client },
                                                      target: { index: 'bar' }

-    client.expects(:search)
-
-
-
-
+    client.expects(:search)
+          .returns({ '_scroll_id' => 'scroll_id_1' }.merge(Marshal.load(Marshal.dump(@default_response))))
+    client.expects(:scroll)
+          .returns(Marshal.load(Marshal.dump(@default_response)))
+          .then
+          .returns(@empty_response).times(2)
+    client.expects(:bulk)
+          .with(body: @bulk_request)
+          .returns(@bulk_response).times(2)

     result = subject.perform

@@ -68,10 +72,8 @@ class Elasticsearch::Extensions::ReindexTest < Test::Unit::TestCase
     subject = Elasticsearch::Extensions::Reindex.new source: { index: 'foo', client: client },
                                                      target: { index: 'bar' }

-    client.expects(:search).returns({ '_scroll_id' => 'scroll_id_1' })
-    client.expects(:scroll).returns(@default_response)
-                           .then.returns(@empty_response)
-                           .times(2)
+    client.expects(:search).returns({ '_scroll_id' => 'scroll_id_1' }.merge(@default_response))
+    client.expects(:scroll).returns(@empty_response)
     client.expects(:bulk).with(body: @bulk_request).returns(@bulk_response_error)

     result = subject.perform
@@ -86,10 +88,8 @@ class Elasticsearch::Extensions::ReindexTest < Test::Unit::TestCase
                                                      target: { index: 'bar' },
                                                      transform: lambda { |d| d['_source']['foo'].upcase!; d }

-    client.expects(:search).returns({ '_scroll_id' => 'scroll_id_1' })
-    client.expects(:scroll).returns(@default_response)
-                           .then.returns(@empty_response)
-                           .times(2)
+    client.expects(:search).returns({ '_scroll_id' => 'scroll_id_1' }.merge(@default_response))
+    client.expects(:scroll).returns(@empty_response)
     client.expects(:bulk).with do |arguments|
       assert_equal 'BAR', arguments[:body][0][:index]['data']['foo']
       true
data/test/test/cluster/integration/cluster_test.rb
CHANGED
@@ -22,6 +22,11 @@ class Elasticsearch::Extensions::TestClusterIntegrationTest < Test::Unit::TestCase
     should "start and stop #{build.to_s}" do
       puts ("----- #{build.to_s} " + "-"*(80-7-build.to_s.size)).to_s.ansi(:bold)
       Elasticsearch::Extensions::Test::Cluster.start command: PATH_TO_BUILDS.join(build.join('bin/elasticsearch')).to_s
+
+      # Index some data to create the data directory
+      client = Elasticsearch::Client.new host: "localhost:9250"
+      client.index index: 'test1', type: 'd', id: 1, body: { title: 'TEST' }
+
       Elasticsearch::Extensions::Test::Cluster.stop command: PATH_TO_BUILDS.join(build.join('bin/elasticsearch')).to_s
     end
   end
data/test/test/cluster/unit/cluster_test.rb
CHANGED
@@ -38,6 +38,13 @@ class Elasticsearch::Extensions::TestClusterTest < Test::Unit::TestCase
     assert_equal 9400, c.arguments[:port]
   end

+  should "not modify the arguments" do
+    args = { port: 9400 }.freeze
+
+    assert_nothing_raised { Cluster::Cluster.new args }
+    assert_nil args[:command]
+  end
+
   should "take parameters from environment variables" do
     ENV['TEST_CLUSTER_PORT'] = '9400'

@@ -88,6 +95,13 @@ class Elasticsearch::Extensions::TestClusterTest < Test::Unit::TestCase
     end
   end

+  should "remove cluster data" do
+    @subject.unstub(:__remove_cluster_data)
+    FileUtils.expects(:rm_rf).with("/tmp/elasticsearch_test")
+
+    @subject.__remove_cluster_data
+  end
+
   context "when starting a cluster, " do
     should "return false when it's already running" do
       Process.expects(:spawn).never
@@ -238,11 +252,18 @@ class Elasticsearch::Extensions::TestClusterTest < Test::Unit::TestCase
       assert_equal '2.0', @subject.__determine_version
     end

-    should "return version from `elasticsearch
+    should "return version from `elasticsearch --version`" do
       File.expects(:exist?).with('/foo/bar/bin/../lib/').returns(false)

-
-
+      Process.stubs(:wait)
+      Process.expects(:spawn)
+             .with do |command, options|
+               assert_equal "/foo/bar/bin/elasticsearch --version", command
+             end
+             .returns(123)
+      Process.expects(:kill).with('INT', 123)
+
+      IO.any_instance.expects(:read)
              .returns('Version: 2.3.0-SNAPSHOT, Build: d1c86b0/2016-03-30T10:43:20Z, JVM: 1.8.0_60')

       assert_equal '2.0', @subject.__determine_version
@@ -259,9 +280,15 @@ class Elasticsearch::Extensions::TestClusterTest < Test::Unit::TestCase
     should "raise an exception when the version cannot be parsed from command output" do
       File.expects(:exist?).with('/foo/bar/bin/../lib/').returns(false)

-
-
-      .
+      Process.stubs(:wait)
+      Process.expects(:spawn)
+             .with do |command, options|
+               assert_equal "/foo/bar/bin/elasticsearch --version", command
+             end
+             .returns(123)
+      Process.expects(:kill).with('INT', 123)
+
+      IO.any_instance.expects(:read).returns('Version: FOOBAR')

       assert_raise(RuntimeError) { @subject.__determine_version }
     end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: elasticsearch-extensions
 version: !ruby/object:Gem::Version
-  version: 0.0.22
+  version: 0.0.23
 platform: ruby
 authors:
 - Karel Minarik
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-
+date: 2016-12-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ansi
@@ -313,4 +313,3 @@ test_files:
 - test/test/cluster/integration/cluster_test.rb
 - test/test/cluster/unit/cluster_test.rb
 - test/test_helper.rb
-has_rdoc: