chef-solr 0.9.8 → 0.9.10.rc.0
- data/bin/chef-solr-indexer +2 -1
- data/lib/chef/solr.rb +53 -34
- data/lib/chef/solr/index.rb +46 -103
- data/lib/chef/solr/index_queue_consumer.rb +4 -7
- data/lib/chef/solr/version.rb +1 -1
- data/spec/chef/solr/index_spec.rb +96 -97
- data/spec/chef/solr_spec.rb +89 -34
- metadata +15 -9
data/bin/chef-solr-indexer
CHANGED
@@ -17,10 +17,11 @@
 # limitations under the License.
 #

+require 'rubygems'
+
 $:.unshift(File.expand_path(File.join(File.dirname(__FILE__), "..", "lib")))
 $:.unshift(File.expand_path(File.join(File.dirname(__FILE__), "..", "..", "chef", "lib")))

-require 'rubygems'
 require 'chef/solr/application/indexer'

 Chef::Solr::Application::Indexer.new.run
data/lib/chef/solr.rb
CHANGED
@@ -20,13 +20,6 @@ require 'chef/mixin/xml_escape'
 require 'chef/log'
 require 'chef/config'
 require 'chef/couchdb'
-require 'chef/role'
-require 'chef/node'
-require 'chef/data_bag'
-require 'chef/data_bag_item'
-require 'chef/api_client'
-require 'chef/openid_registration'
-require 'chef/webui_user'
 require 'net/http'
 require 'libxml'
 require 'uri'
@@ -55,11 +48,10 @@ class Chef
       select_url = "/solr/select?#{to_params(options)}"
       Chef::Log.debug("Sending #{select_url} to Solr")
       req = Net::HTTP::Get.new(select_url)
-
-
-
-
-      end
+
+      description = "Search Query to Solr '#{solr_url}#{select_url}'"
+
+      res = http_request_handler(req, description)
       Chef::Log.debug("Parsing Solr result set:\n#{res.body}")
       eval(res.body)
     end
@@ -68,34 +60,37 @@ class Chef
       Chef::Log.debug("POSTing document to SOLR:\n#{doc}")
       req = Net::HTTP::Post.new("/solr/update", "Content-Type" => "text/xml")
       req.body = doc.to_s
-
-
-
-
-      res
+
+      description = "POST to Solr '#{solr_url}'"
+
+      http_request_handler(req, description)
     end

-
-
+    START_XML = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<add><doc>"
+    END_XML = "</doc></add>\n"
+    FIELD_ATTR = '<field name="'
+    FIELD_ATTR_END = '">'
+    CLOSE_FIELD = "</field>"

+    def solr_add(data)
       Chef::Log.debug("adding to SOLR: #{data.inspect}")
-
-
-
-
-
-
-
-
-
-
-
-
+
+      xml = ""
+      xml << START_XML
+
+      data.each do |field, values|
+        values.each do |v|
+          xml << FIELD_ATTR
+          xml << field
+          xml << FIELD_ATTR_END
+          xml << xml_escape(v)
+          xml << CLOSE_FIELD
         end
-      xml_add << xml_doc
       end
-
-
+      xml << END_XML
+      xml
+
+      post_to_solr(xml)
     end

     def solr_commit(opts={})
@@ -210,6 +205,30 @@ class Chef
         '%'+$1.unpack('H2'*$1.size).join('%').upcase
       }.tr(' ', '+')
     end
+
+    # handles multiple net/http exceptions and no method closed? bug
+    def http_request_handler(req, description='HTTP call')
+      res = @http.request(req)
+      unless res.kind_of?(Net::HTTPSuccess)
+        Chef::Log.fatal("#{description} failed (#{res.class} #{res.code} #{res.message})")
+        res.error!
+      end
+      res
+    rescue Timeout::Error, Errno::EINVAL, EOFError, Net::HTTPBadResponse, Net::HTTPHeaderSyntaxError, Net::ProtocolError, Errno::ECONNREFUSED, Errno::ECONNRESET, Errno::ETIMEDOUT, NoMethodError => e
+      # http://redmine.ruby-lang.org/issues/show/2708
+      # http://redmine.ruby-lang.org/issues/show/2758
+      if e.to_s =~ /#{Regexp.escape(%q|undefined method 'closed?' for nil:NilClass|)}/
+        Chef::Log.fatal("#{description} failed. Chef::Exceptions::SolrConnectionError exception: Errno::ECONNREFUSED (net/http undefined method closed?) attempting to contact #{@solr_url}")
+        Chef::Log.debug("rescued error in http connect, treating it as Errno::ECONNREFUSED to hide bug in net/http")
+        Chef::Log.debug(e.backtrace.join("\n"))
+        raise Chef::Exceptions::SolrConnectionError, "Errno::ECONNREFUSED: Connection refused attempting to contact #{@solr_url}"
+      end
+
+      Chef::Log.fatal("#{description} failed. Chef::Exceptions::SolrConnectionError exception: #{e.class.name}: #{e.to_s} attempting to contact #{@solr_url}")
+      Chef::Log.debug(e.backtrace.join("\n"))
+
+      raise Chef::Exceptions::SolrConnectionError, "#{e.class.name}: #{e.to_s}"
+    end

   end
 end
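For readers skimming the diff: the new solr_add builds the Solr update document by plain string concatenation from the constants added above. Below is a minimal standalone sketch of the same construction; build_solr_doc is just an illustrative name, and CGI.escapeHTML stands in here for Chef's xml_escape mixin.

  require 'cgi'

  START_XML      = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<add><doc>"
  END_XML        = "</doc></add>\n"
  FIELD_ATTR     = '<field name="'
  FIELD_ATTR_END = '">'
  CLOSE_FIELD    = "</field>"

  # data is shaped like the flattened hash the indexer produces: field name => array of string values.
  def build_solr_doc(data)
    xml = ""
    xml << START_XML
    data.each do |field, values|
      values.each do |v|
        xml << FIELD_ATTR << field << FIELD_ATTR_END << CGI.escapeHTML(v) << CLOSE_FIELD
      end
    end
    xml << END_XML
  end

  puts build_solr_doc("foo" => ["bar"], "battles" => ["often", "but"])
  # <?xml version="1.0" encoding="UTF-8"?>
  # <add><doc><field name="foo">bar</field><field name="battles">often</field><field name="battles">but</field></doc></add>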
data/lib/chef/solr/index.rb
CHANGED
@@ -6,9 +6,9 @@
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
-#
+#
 # http://www.apache.org/licenses/LICENSE-2.0
-#
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -26,12 +26,24 @@ class Chef
   class Solr
     class Index < Solr

+      UNDERSCORE = '_'
+      X = 'X'
+
+      X_CHEF_id_CHEF_X = 'X_CHEF_id_CHEF_X'
+      X_CHEF_database_CHEF_X = 'X_CHEF_database_CHEF_X'
+      X_CHEF_type_CHEF_X = 'X_CHEF_type_CHEF_X'
+
       def add(id, database, type, item)
-
+        unless item.respond_to?(:keys)
+          raise ArgumentError, "#{self.class.name} can only index Hash-like objects. You gave #{item.inspect}"
+        end
+
         to_index = flatten_and_expand(item)
-
-        to_index[
-        to_index[
+
+        to_index[X_CHEF_id_CHEF_X] = [id]
+        to_index[X_CHEF_database_CHEF_X] = [database]
+        to_index[X_CHEF_type_CHEF_X] = [type]
+
         solr_add(to_index)
         to_index
       end
@@ -44,117 +56,48 @@ class Chef
         solr_delete_by_query(query)
       end

-      def flatten_and_expand(item
-
-        # If we have a parent, we want to add the current key as a value
-        if parent
-          # foo_bar = bar
-          set_field_value(fields, parent, key)
-          # foo_X = bar, etc.
-          make_expando_fields(parent).each do |ex_key|
-            set_field_value(fields, ex_key, key)
-          end
-        end
-        case item[key]
-        when Hash
-          parent_key = parent ? "#{parent}_#{key}" : key
-          flatten_and_expand(item[key], fields, parent_key)
-        else
-          parent_key = parent ? "#{parent}_#{key}" : key
-          set_field_value(fields, key, item[key])
-          set_field_value(fields, parent_key, item[key]) if parent
-          make_expando_fields(parent_key).each do |ex_key|
-            set_field_value(fields, ex_key, item[key])
-          end
-        end
-        end
-        fields
-      end
-
-      def make_expando_fields(key)
-        key = key.to_s
-        fields = Array.new
-        parts = key.split("_")
-        length = parts.length
-        parts.each_index do |i|
-          beginning = nil
-          remainder = nil
-          if i == 0
-            beginning = "X"
-          else
-            beginning = parts[0..i-1].join("_")
-          end
-
-          if i == length-1
-            remainder = "X"
-          else
-            remainder = parts[i+1..-1].join("_")
-          end
+      def flatten_and_expand(item)
+        @flattened_item = Hash.new {|hash, key| hash[key] = []}

-
-
-            fields << "#{beginning}_#{remainder}"
-          end
-        else
-          fields << "#{beginning}_X_#{remainder}"
-        end
+        item.each do |key, value|
+          flatten_each([key.to_s], value)
         end
-        fields
-      end

-
-
-        if fields.has_key?(key)
-          convert_field_to_array(fields, key, value) unless fields[key].kind_of?(Array)
-          add_value_to_field_array(fields, key, value)
-        else
-          check_value(value)
-          if value.kind_of?(Array)
-            fields[key] = Array.new
-            value.each do |v|
-              if v.kind_of?(Hash)
-                flatten_and_expand(v, fields, key)
-              else
-                fields[key] << v.to_s
-              end
-            end
-          else
-            fields[key] = value.to_s
-          end
-        end
-        fields
+        @flattened_item.each_value { |values| values.uniq! }
+        @flattened_item
       end

-      def
-
-
-
-
-
-        else
-          fields[key] << v.to_s unless fields[key].include?(v.to_s)
-        end
+      def flatten_each(keys, values)
+        case values
+        when Hash
+          values.each do |child_key, child_value|
+            add_field_value(keys, child_key)
+            flatten_each(keys + [child_key.to_s], child_value)
           end
+        when Array
+          values.each { |child_value| flatten_each(keys, child_value) }
         else
-
+          add_field_value(keys, values)
         end
-        fields
       end

-      def
-
-
-
-
-        fields
+      def add_field_value(keys, value)
+        value = value.to_s
+        each_expando_field(keys) { |expando_field| @flattened_item[expando_field] << value }
+        @flattened_item[keys.join(UNDERSCORE)] << value
+        @flattened_item[keys.last] << value
       end

-      def
-
-
+      def each_expando_field(keys)
+        return if keys.size == 1
+        0.upto(keys.size - 1) do |index|
+          original = keys[index]
+          keys[index] = X
+          yield keys.join(UNDERSCORE)
+          keys[index] = original
+        end
       end

     end
   end
 end
-
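To make the new expando-field behaviour concrete, here is a rough usage sketch of the rewritten flattener. It assumes the chef-solr 0.9.10 gem and its dependencies (the chef gem, libxml) are installed; the sample attribute data is made up.

  require 'rubygems'
  require 'chef/solr/index'

  index = Chef::Solr::Index.new
  flattened = index.flatten_and_expand("kernel" => { "machine" => "x86_64" })

  # Every nesting level becomes searchable directly and through X wildcards:
  #   flattened["kernel"]         #=> ["machine"]
  #   flattened["kernel_machine"] #=> ["x86_64"]
  #   flattened["machine"]        #=> ["x86_64"]
  #   flattened["X_machine"]      #=> ["x86_64"]
  #   flattened["kernel_X"]       #=> ["x86_64"]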
data/lib/chef/solr/index_queue_consumer.rb
CHANGED
@@ -20,12 +20,6 @@ require 'chef/log'
 require 'chef/config'
 require 'chef/solr'
 require 'chef/solr/index'
-require 'chef/node'
-require 'chef/role'
-require 'chef/rest'
-require 'chef/data_bag'
-require 'chef/data_bag_item'
-require 'chef/api_client'
 require 'chef/couchdb'
 require 'chef/index_queue'

@@ -41,7 +35,10 @@ class Chef
         Chef::Log.debug("Dequeued item for indexing: #{payload.inspect}")

         begin
-
+          # older producers will send the raw item, and we no longer inflate it
+          # to an object.
+          pitem = payload["item"].to_hash
+          pitem.delete("json_class")
           response = generate_response { index.add(payload["id"], payload["database"], payload["type"], pitem) }
         rescue NoMethodError
           response = generate_response() { raise ArgumentError, "Payload item does not respond to :keys or :to_hash, cannot index!" }
data/lib/chef/solr/version.rb
CHANGED
data/spec/chef/solr/index_spec.rb
CHANGED
@@ -1,5 +1,7 @@
 require File.expand_path(File.join("#{File.dirname(__FILE__)}", '..', '..', 'spec_helper'))

+
+
 describe Chef::Solr::Index do
   before(:each) do
     @index = Chef::Solr::Index.new
@@ -25,37 +27,36 @@ describe Chef::Solr::Index do

   it "should index the object as a single flat hash, with only strings or arrays as values" do
     validate = {
-      "X_CHEF_id_CHEF_X" => 1,
-      "X_CHEF_database_CHEF_X" => "monkey",
-      "X_CHEF_type_CHEF_X" => "snakes",
-      "foo" => "bar",
+      "X_CHEF_id_CHEF_X" => [1],
+      "X_CHEF_database_CHEF_X" => ["monkey"],
+      "X_CHEF_type_CHEF_X" => ["snakes"],
+      "foo" => ["bar"],
       "battles" => [ "often", "but", "for" ],
-      "battles_often" => "sings like smurfs",
-      "often" => "sings like smurfs",
-      "battles_but" => "still has good records",
-      "but" => "still has good records",
+      "battles_often" => ["sings like smurfs"],
+      "often" => ["sings like smurfs"],
+      "battles_but" => ["still has good records"],
+      "but" => ["still has good records"],
       "battles_for" => [ "all", "of", "that" ],
       "for" => [ "all", "of", "that" ],
-      "snoopy" => "
+      "snoopy" => ["sits-in-a-barn"],
       "battles_X" => [ "sings like smurfs", "still has good records", "all", "of", "that" ],
-      "X_often" => "sings like smurfs",
-      "X_but" => "still has good records",
+      "X_often" =>[ "sings like smurfs"],
+      "X_but" => ["still has good records"],
       "X_for" => [ "all", "of", "that" ]
-    }
-    to_index = @index.add(1, "monkey", "snakes", {
+    }
+    to_index = @index.add(1, "monkey", "snakes", {
       "foo" => :bar,
-      "battles" => {
+      "battles" => {
         "often" => "sings like smurfs",
         "but" => "still has good records",
         "for" => [ "all", "of", "that" ]
       },
-      "snoopy" => "
+      "snoopy" => "sits-in-a-barn"
     })
+
     validate.each do |k, v|
       if v.kind_of?(Array)
-
-        r = to_index[k] & v
-        r.length.should == to_index[k].length
+        to_index[k].sort.should == v.sort
       else
         to_index[k].should == v
       end
@@ -88,101 +89,99 @@ describe Chef::Solr::Index do
   end

   it "should set a value for the parent as key, with the key as the value" do
-    @index.flatten_and_expand({ "one" => "woot" }
-    @fields["omerta"].should == "one"
+    @fields = @index.flatten_and_expand("omerta" => { "one" => "woot" })
+    @fields["omerta"].should == ["one"]
   end

   it "should call itself recursively for values that are hashes" do
-    @index.flatten_and_expand({ "one" => { "two" => "three", "four" => { "five" => "six" } }}
-    {
-
-
-
-
-
-
-
-
-
-
-
-    }.each do |k, v|
+    @fields = @index.flatten_and_expand({ "one" => { "two" => "three", "four" => { "five" => "six" } }})
+    expected = {"one" => [ "two", "four" ],
+                "one_two" => ["three"],
+                "X_two" => ["three"],
+                "two" => ["three"],
+                "one_four" => ["five"],
+                "X_four" => ["five"],
+                "one_X" => [ "three", "five" ],
+                "one_four_five" => ["six"],
+                "X_four_five" => ["six"],
+                "one_X_five" => ["six"],
+                "one_four_X" => ["six"],
+                "five" => ["six"]}
+    expected.each do |k, v|
       @fields[k].should == v
     end
   end

   it "should call itself recursively for hashes nested in arrays" do
-    @index.flatten_and_expand({ :one => [ { :two => "three" }, { :four => { :five => "six" } } ] }
-    {
-
-
-
-
-
-
-
-
-
-
-
-
-    @fields[
+    @fields = @index.flatten_and_expand({ :one => [ { :two => "three" }, { :four => { :five => "six" } } ] })
+    expected = {"one_X_five" => ["six"],
+                "one_four" => ["five"],
+                "one_X" => [ "three", "five" ],
+                "two" => ["three"],
+                "one_four_X" => ["six"],
+                "X_four" => ["five"],
+                "X_four_five" => ["six"],
+                "one" => [ "two", "four" ],
+                "one_four_five" => ["six"],
+                "five" => ["six"],
+                "X_two" => ["three"],
+                "one_two" => ["three"]}
+
+    expected.each do |key, expected_value|
+      @fields[key].should == expected_value
     end
   end

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    @
-    @fields["one"].should eql([ "two", "three" ])
-  end
-
-  it "should not duplicate values when a field has been set with multiple arrays" do
-    @index.set_field_value(@fields, "one", [ "two", "three" ])
-    @index.set_field_value(@fields, "one", [ "two", "four" ])
-    @fields["one"].should eql([ "two", "three", "four" ])
+  it "generates unlimited levels of expando fields when expanding" do
+    expected_keys = ["one",
+                     "one_two",
+                     "X_two",
+                     "one_X",
+                     "one_two_three",
+                     "X_two_three",
+                     "one_X_three",
+                     "one_two_X",
+                     "one_two_three_four",
+                     "X_two_three_four",
+                     "one_X_three_four",
+                     "one_two_X_four",
+                     "one_two_three_X",
+                     "one_two_three_four_five",
+                     "X_two_three_four_five",
+                     "one_X_three_four_five",
+                     "one_two_X_four_five",
+                     "one_two_three_X_five",
+                     "one_two_three_four_X",
+                     "six",
+                     "one_two_three_four_five_six",
+                     "X_two_three_four_five_six",
+                     "one_X_three_four_five_six",
+                     "one_two_X_four_five_six",
+                     "one_two_three_X_five_six",
+                     "one_two_three_four_X_six",
+                     "one_two_three_four_five_X"].sort
+
+    nested = {:one => {:two => {:three => {:four => {:five => {:six => :end}}}}}}
+    @fields = @index.flatten_and_expand(nested)
+
+    @fields.keys.sort.should include(*expected_keys)
   end

+  end

-
-
+  describe "creating expando fields" do
+    def make_expando_fields(parts)
+      expando_fields = []
+      @index.each_expando_field(parts) { |ex| expando_fields << ex }
+      expando_fields
     end

-    it "
-
-
-    }.should
+    it "joins the fields with a big X" do
+      make_expando_fields(%w{foo bar baz qux}).should == ["X_bar_baz_qux", "foo_X_baz_qux", "foo_bar_X_qux", "foo_bar_baz_X"]
+      make_expando_fields(%w{foo bar baz}).should == ["X_bar_baz", "foo_X_baz", "foo_bar_X"]
+      make_expando_fields(%w{foo bar}).should == ["X_bar", "foo_X"]
+      make_expando_fields(%w{foo}).should == []
     end
   end
+
 end
data/spec/chef/solr_spec.rb
CHANGED
@@ -1,4 +1,5 @@
 require File.expand_path(File.join("#{File.dirname(__FILE__)}", '..', 'spec_helper'))
+require 'net/http'

 describe Chef::Solr do
   before(:each) do
@@ -27,33 +28,55 @@ describe Chef::Solr do
     @solr.http = @http
   end

-
-
-
-
+    describe "when the HTTP call is successful" do
+      it "should call get to /solr/select with the escaped query" do
+        Net::HTTP::Get.should_receive(:new).with(%r(q=hostname%3Alatte))
+        @solr.solr_select("chef_opscode", "node", :q => "hostname:latte")
+      end

-
-
-
-
+      it "should call get to /solr/select with wt=ruby" do
+        Net::HTTP::Get.should_receive(:new).with(%r(wt=ruby))
+        @solr.solr_select("chef_opscode", "node", :q => "hostname:latte")
+      end

-
-
-
-
-
-
-
-
+      it "should call get to /solr/select with indent=off" do
+        Net::HTTP::Get.should_receive(:new).with(%r(indent=off))
+        @solr.solr_select("chef_opscode", "node", :q => "hostname:latte")
+      end
+
+      it "should call get to /solr/select with filter query" do
+        Net::HTTP::Get.should_receive(:new).with(/fq=%2BX_CHEF_database_CHEF_X%3Achef_opscode\+%2BX_CHEF_type_CHEF_X%3Anode/)
+        @solr.solr_select("chef_opscode", "node", :q => "hostname:latte")
+      end
+
+      it "should return the evaluated response body" do
+        res = @solr.solr_select("chef_opscode", "node", :q => "hostname:latte")
+        res.should == { :some => :hash }
+      end
     end

-
-
-
+    describe "when the HTTP call is unsuccessful" do
+      [Timeout::Error, EOFError, Net::HTTPBadResponse, Net::HTTPHeaderSyntaxError, Net::ProtocolError, Errno::ECONNREFUSED, Errno::ECONNRESET, Errno::ETIMEDOUT, Errno::EINVAL].each do |exception|
+        it "should rescue, log an error message, and raise a SolrConnectionError encountering exception #{exception}" do
+          lambda {
+            @http.should_receive(:request).with(instance_of(Net::HTTP::Get)).and_raise(exception)
+            Chef::Log.should_receive(:fatal).with(/Search Query to Solr '(.+?)' failed. Chef::Exceptions::SolrConnectionError exception: #{exception}:.+/)
+            @solr.solr_select("chef_opscode", "node", :q => "hostname:latte")
+          }.should raise_error(Chef::Exceptions::SolrConnectionError)
+        end
+      end
+
+      it "should rescue, log an error message, and raise a SolrConnectionError when encountering exception NoMethodError and net/http closed? bug" do
+        lambda {
+          @no_method_error = NoMethodError.new("undefined method 'closed\?' for nil:NilClass")
+          @http.should_receive(:request).with(instance_of(Net::HTTP::Get)).and_raise(@no_method_error)
+          Chef::Log.should_receive(:fatal).with(/Search Query to Solr '(.+?)' failed. Chef::Exceptions::SolrConnectionError exception: Errno::ECONNREFUSED.+net\/http undefined method closed.+/)
+          @solr.solr_select("chef_opscode", "node", :q => "hostname:latte")
+        }.should raise_error(Chef::Exceptions::SolrConnectionError)
+      end
     end
   end

-
   describe "post_to_solr" do
     before(:each) do
       @http_response = mock(
@@ -70,27 +93,60 @@ describe Chef::Solr do
       Net::HTTP::Post.stub!(:new).and_return(@http_request)
       @doc = { "foo" => "bar" }
     end
-
-    it "should post to /solr/update" do
-      Net::HTTP::Post.should_receive(:new).with("/solr/update", "Content-Type" => "text/xml").and_return(@http_request)
-      @solr.post_to_solr(@doc)
-    end

-
-
-
+    describe 'when the HTTP call is successful' do
+      it "should post to /solr/update" do
+        Net::HTTP::Post.should_receive(:new).with("/solr/update", "Content-Type" => "text/xml").and_return(@http_request)
+        @solr.post_to_solr(@doc)
+      end
+
+      it "should set the body of the request to the stringified doc" do
+        @http_request.should_receive(:body=).with("foo")
+        @solr.post_to_solr(:foo)
+      end
+
+      it "should send the request to solr" do
+        @http.should_receive(:request).with(@http_request).and_return(@http_response)
+        @solr.post_to_solr(:foo).should
+      end
     end

-
-
-
+    describe "when the HTTP call is unsuccessful due to an exception" do
+      it "should post to /solr/update" do
+        Net::HTTP::Post.should_receive(:new).with("/solr/update", "Content-Type" => "text/xml").and_return(@http_request)
+        @solr.post_to_solr(@doc)
+      end
+
+      it "should set the body of the request to the stringified doc" do
+        @http_request.should_receive(:body=).with("foo")
+        @solr.post_to_solr(:foo)
+      end
+
+      [Timeout::Error, EOFError, Net::HTTPBadResponse, Net::HTTPHeaderSyntaxError, Net::ProtocolError, Errno::ECONNREFUSED, Errno::ECONNRESET, Errno::ETIMEDOUT, Errno::EINVAL].each do |exception|
+        it "should rescue and log an error message when encountering exception #{exception} and then re-raise it" do
+          lambda {
+            @http.should_receive(:request).with(@http_request).and_raise(exception)
+            Chef::Log.should_receive(:fatal).with(/POST to Solr '(.+?)' failed. Chef::Exceptions::SolrConnectionError exception: #{exception}:.+/)
+            @solr.post_to_solr(:foo)
+          }.should raise_error(Chef::Exceptions::SolrConnectionError)
+        end
+      end
+
+      it "should rescue and log an error message when encountering exception NoMethodError and net/http closed? bug" do
+        lambda {
+          @no_method_error = NoMethodError.new("undefined method 'closed\?' for nil:NilClass")
+          @http.should_receive(:request).with(@http_request).and_raise(@no_method_error)
+          Chef::Log.should_receive(:fatal).with(/POST to Solr '(.+?)' failed. Chef::Exceptions::SolrConnectionError exception: Errno::ECONNREFUSED.+net\/http undefined method closed.+/)
+          @solr.post_to_solr(:foo)
+        }.should raise_error(Chef::Exceptions::SolrConnectionError)
+      end
     end
   end

   describe "solr_add" do
     before(:each) do
       @solr.stub!(:post_to_solr).and_return(true)
-      @data = { "foo" => "bar" }
+      @data = { "foo" => ["bar"] }
     end

     it "should send valid XML to solr" do
@@ -99,7 +155,7 @@ describe Chef::Solr do
     end

     it "XML escapes content before sending to SOLR" do
-      @data["foo"] = "<&>"
+      @data["foo"] = ["<&>"]
       @solr.should_receive(:post_to_solr).with("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<add><doc><field name=\"foo\"><&></field></doc></add>\n")

       @solr.solr_add(@data)
@@ -234,5 +290,4 @@ describe Chef::Solr do
       @solr.rebuild_index["Chef::DataBag"].should == "success"
     end
   end
-
 end
metadata
CHANGED
@@ -1,12 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: chef-solr
 version: !ruby/object:Gem::Version
-  prerelease:
+  prerelease: true
   segments:
   - 0
   - 9
-  - 8
-  version: 0.9.8
+  - 10
+  - rc
+  - 0
+  version: 0.9.10.rc.0
 platform: ruby
 authors:
 - Adam Jacob
@@ -14,7 +16,7 @@ autorequire:
 bindir: bin
 cert_chain: []

-date: 2010-
+date: 2010-10-07 00:00:00 -07:00
 default_executable:
 dependencies:
 - !ruby/object:Gem::Dependency
@@ -58,8 +60,10 @@ dependencies:
         segments:
         - 0
         - 9
-        - 8
-        version: 0.9.8
+        - 10
+        - rc
+        - 0
+        version: 0.9.10.rc.0
   type: :runtime
   version_requirements: *id003
 description:
@@ -114,11 +118,13 @@ required_ruby_version: !ruby/object:Gem::Requirement
 required_rubygems_version: !ruby/object:Gem::Requirement
   none: false
   requirements:
-  - - "
+  - - ">"
     - !ruby/object:Gem::Version
       segments:
-      -
-
+      - 1
+      - 3
+      - 1
+      version: 1.3.1
 requirements: []

 rubyforge_project: