droonga-engine 1.0.5 → 1.0.6
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/bin/droonga-engine-absorb-data +2 -1
- data/bin/droonga-engine-catalog-generate +21 -5
- data/bin/droonga-engine-catalog-modify +22 -6
- data/bin/droonga-engine-configure +215 -0
- data/bin/droonga-engine-join +48 -123
- data/bin/droonga-engine-unjoin +14 -1
- data/doc/text/news.md +21 -0
- data/droonga-engine.gemspec +12 -10
- data/install/centos/droonga-engine +60 -0
- data/install/centos/functions.sh +35 -0
- data/install/debian/droonga-engine +155 -0
- data/install/debian/functions.sh +33 -0
- data/install.sh +360 -0
- data/lib/droonga/address.rb +3 -1
- data/lib/droonga/catalog/dataset.rb +2 -0
- data/lib/droonga/catalog/version1.rb +16 -3
- data/lib/droonga/catalog/version2.rb +16 -3
- data/lib/droonga/catalog_fetcher.rb +51 -0
- data/lib/droonga/catalog_generator.rb +6 -5
- data/lib/droonga/catalog_modifier.rb +45 -0
- data/lib/droonga/command/droonga_engine.rb +96 -29
- data/lib/droonga/command/droonga_engine_service.rb +5 -0
- data/lib/droonga/command/remote.rb +368 -0
- data/lib/droonga/command/serf_event_handler.rb +37 -304
- data/lib/droonga/dispatcher.rb +15 -1
- data/lib/droonga/engine/version.rb +1 -1
- data/lib/droonga/engine.rb +11 -4
- data/lib/droonga/engine_state.rb +2 -0
- data/lib/droonga/farm.rb +14 -5
- data/lib/droonga/fluent_message_receiver.rb +23 -6
- data/lib/droonga/fluent_message_sender.rb +5 -1
- data/lib/droonga/node_status.rb +67 -0
- data/lib/droonga/path.rb +28 -4
- data/lib/droonga/plugins/catalog.rb +40 -0
- data/lib/droonga/safe_file_writer.rb +1 -1
- data/lib/droonga/searcher.rb +3 -15
- data/lib/droonga/serf.rb +17 -32
- data/lib/droonga/serf_downloader.rb +26 -1
- data/lib/droonga/service_installation.rb +123 -0
- data/lib/droonga/session.rb +4 -0
- data/lib/droonga/slice.rb +22 -12
- data/lib/droonga/supervisor.rb +16 -2
- data/lib/droonga/worker_process_agent.rb +13 -1
- data/sample/droonga-engine.yaml +5 -0
- data/test/command/config/default/catalog.json +1 -1
- data/test/command/config/default/droonga-engine.yaml +4 -0
- data/test/command/config/version1/catalog.json +1 -1
- data/test/command/suite/catalog/fetch.expected +64 -0
- data/test/command/suite/catalog/fetch.test +6 -0
- data/test/unit/catalog/test_version1.rb +2 -2
- data/test/unit/catalog/test_version2.rb +3 -3
- data/test/unit/helper/sandbox.rb +3 -1
- data/test/unit/plugins/catalog/test_fetch.rb +76 -0
- data/test/unit/test_catalog_generator.rb +7 -3
- metadata +74 -27
- data/bin/droonga-engine-data-publisher +0 -66
@@ -14,12 +14,9 @@
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA

 require "json"
+require "fileutils"

-require "droonga/
-require "droonga/serf"
-require "droonga/catalog_generator"
-require "droonga/data_absorber"
-require "droonga/safe_file_writer"
+require "droonga/command/remote"

 module Droonga
   module Command
@@ -31,331 +28,67 @@ module Droonga
       end

       def initialize
-        @
-        @serf_rpc_address = ENV["SERF_RPC_ADDRESS"] || "127.0.0.1:7373"
-        @serf_name = ENV["SERF_SELF_NAME"]
-        @response = {
-          "log" => []
-        }
+        @payload = nil
       end

       def run
-
-
-          log(" => ignoring event not for me")
-          output_response
-          return true
-        end
+        command_class = detect_command_class
+        return true if command_class.nil?

-
-
-
+        serf_name = ENV["SERF_SELF_NAME"]
+        command = command_class.new(serf_name, @payload)
+        command.process if command.should_process?
+        output_response(command.response)
+        true
+      rescue Exception => exception
+        #XXX Any exception blocks following serf operations.
+        # To keep it working, I rescue any exception for now.
+        FileUtils.mkdir_p(Path.serf_event_handler_errors)
+        File.open(Path.serf_event_handler_error_file, "w") do |file|
+          file.write(exception.inspect)
+        end
         true
       end

       private
-      def
-
-        @payload = nil
-        case @event_name
+      def detect_command_class
+        case ENV["SERF_EVENT"]
         when "user"
-          @event_sub_name = ENV["SERF_USER_EVENT"]
           @payload = JSON.parse($stdin.gets)
-
+          detect_command_class_from_custom_event(ENV["SERF_USER_EVENT"])
         when "query"
-          @event_sub_name = ENV["SERF_QUERY_NAME"]
           @payload = JSON.parse($stdin.gets)
-
+          detect_command_class_from_custom_event(ENV["SERF_QUERY_NAME"])
         when "member-join", "member-leave", "member-update", "member-reap"
-
+          Remote::UpdateLiveNodes
+        else
+          nil
         end
       end

-      def
-
-        return true unless @payload["node"]
-
-        @payload["node"] == @serf_name
-      end
-
-      def process_event
-        case @event_sub_name
+      def detect_command_class_from_custom_event(event_name)
+        case event_name
         when "change_role"
-
+          Remote::ChangeRole
         when "report_status"
-
+          Remote::ReportStatus
         when "join"
-
+          Remote::Join
         when "set_replicas"
-
+          Remote::SetReplicas
         when "add_replicas"
-
+          Remote::AddReplicas
         when "remove_replicas"
-
+          Remote::RemoveReplicas
         when "absorb_data"
-
-
-
-        when "unpublish_catalog"
-          unpublish_catalog
-        end
-      end
-
-      def output_response
-        puts JSON.generate(@response)
-      end
-
-      def host
-        @serf_name.split(":").first
-      end
-
-      def given_hosts
-        hosts = @payload["hosts"]
-        return nil unless hosts
-        hosts = [hosts] if hosts.is_a?(String)
-        hosts
-      end
-
-      def report_status
-        @response["value"] = status(@payload["key"].to_sym)
-      end
-
-      def join
-        type = @payload["type"]
-        log("type = #{type}")
-        case type
-        when "replica"
-          join_as_replica
+          Remote::AbsorbData
+        else
+          nil
         end
       end

-      def
-
-        return unless source_node
-
-        log("source_node = #{source_node}")
-
-        source_host = source_node.split(":").first
-
-        # fetch_port = @payload["fetch_port"]
-        # catalog = fetch_catalog(source_node, fetch_port)
-        catalog = JSON.parse(Path.catalog.read) #XXX workaround until "fetch" become available
-
-        generator = create_current_catalog_generator(catalog)
-        dataset = generator.dataset_for_host(source_host) ||
-                    generator.dataset_for_host(host)
-        return unless dataset
-
-        # restart self with the fetched catalog.
-        SafeFileWriter.write(Path.catalog, JSON.pretty_generate(catalog))
-
-        dataset_name = dataset.name
-        tag = dataset.replicas.tag
-        port = dataset.replicas.port
-        other_hosts = dataset.replicas.hosts
-
-        log("dataset = #{dataset_name}")
-        log("port = #{port}")
-        log("tag = #{tag}")
-
-        if @payload["copy"]
-          log("starting to copy data from #{source_host}")
-
-          modify_catalog do |modifier|
-            modifier.datasets[dataset_name].replicas.hosts = [host]
-          end
-          sleep(5) #TODO: wait for restart. this should be done more safely, to avoid starting of absorbing with old catalog.json.
-
-          save_status(:absorbing, true)
-          DataAbsorber.absorb(:dataset => dataset_name,
-                              :source_host => source_host,
-                              :destination_host => host,
-                              :port => port,
-                              :tag => tag)
-          delete_status(:absorbing)
-          sleep(1)
-        end
-
-        log("joining to the cluster: update myself")
-
-        modify_catalog do |modifier|
-          modifier.datasets[dataset_name].replicas.hosts += other_hosts
-          modifier.datasets[dataset_name].replicas.hosts.uniq!
-        end
-      end
-
-      def fetch_catalog(source_node, port)
-        source_host = source_node.split(":").first
-
-        url = "http://#{source_host}:#{port}"
-        connection = Faraday.new(url) do |builder|
-          builder.response(:follow_redirects)
-          builder.adapter(Faraday.default_adapter)
-        end
-        response = connection.get("/catalog.json")
-        catalog = response.body
-
-        JSON.parse(catalog)
-      end
-
-      def publish_catalog
-        port = @payload["port"]
-        return unless port
-
-        env = {}
-        publisher_command_line = [
-          "droonga-engine-data-publisher",
-          "--base-dir", Path.base.to_s,
-          "--port", port.to_s,
-          "--published-file", Path.catalog.to_s
-        ]
-        pid = spawn(env, *publisher_command_line)
-        Process.detach(pid)
-        sleep(1) # wait until the directory is published
-
-        published_dir = Path.published(port)
-        pid_file = published_dir + ".pid"
-
-        File.open(pid_file.to_s, "w") do |file|
-          file.puts(pid)
-        end
-      end
-
-      def unpublish_catalog
-        port = @payload["port"]
-        return unless port
-
-        published_dir = Path.published(port)
-        pid_file = published_dir + ".pid"
-        pid = pid_file.read.to_i
-
-        Process.kill("INT", pid)
-      end
-
-      def set_replicas
-        dataset = @payload["dataset"]
-        return unless dataset
-
-        hosts = given_hosts
-        return unless hosts
-
-        log("new replicas: #{hosts.join(",")}")
-
-        modify_catalog do |modifier|
-          modifier.datasets[dataset].replicas.hosts = hosts
-        end
-      end
-
-      def add_replicas
-        dataset = @payload["dataset"]
-        return unless dataset
-
-        hosts = given_hosts
-        return unless hosts
-
-        hosts -= [host]
-        return if hosts.empty?
-
-        log("adding replicas: #{hosts.join(",")}")
-
-        modify_catalog do |modifier|
-          modifier.datasets[dataset].replicas.hosts += hosts
-          modifier.datasets[dataset].replicas.hosts.uniq!
-        end
-      end
-
-      def remove_replicas
-        dataset = @payload["dataset"]
-        return unless dataset
-
-        hosts = given_hosts
-        return unless hosts
-
-        log("removing replicas: #{hosts.join(",")}")
-
-        modify_catalog do |modifier|
-          modifier.datasets[dataset].replicas.hosts -= hosts
-        end
-      end
-
-      def modify_catalog
-        generator = create_current_catalog_generator
-        yield(generator)
-        SafeFileWriter.write(Path.catalog, JSON.pretty_generate(generator.generate))
-      end
-
-      def create_current_catalog_generator(current_catalog=nil)
-        current_catalog ||= JSON.parse(Path.catalog.read)
-        generator = CatalogGenerator.new
-        generator.load(current_catalog)
-      end
-
-      def absorb_data
-        source = @payload["source"]
-        return unless source
-
-        log("start to absorb data from #{source}")
-
-        dataset_name = @payload["dataset"]
-        port = @payload["port"]
-        tag = @payload["tag"]
-
-        if dataset_name.nil? or port.nil? or tag.nil?
-          current_catalog = JSON.parse(Path.catalog.read)
-          generator = CatalogGenerator.new
-          generator.load(current_catalog)
-
-          dataset = generator.dataset_for_host(source)
-          return unless dataset
-
-          dataset_name = dataset.name
-          port = dataset.replicas.port
-          tag = dataset.replicas.tag
-        end
-
-        log("dataset = #{dataset_name}")
-        log("port = #{port}")
-        log("tag = #{tag}")
-
-        save_status(:absorbing, true)
-        DataAbsorber.absorb(:dataset => dataset_name,
-                            :source_host => source,
-                            :destination_host => host,
-                            :port => port,
-                            :tag => tag,
-                            :client => "droonga-send")
-        delete_status(:absorbing)
-      end
-
-      def live_nodes
-        Serf.live_nodes(@serf_name)
-      end
-
-      def output_live_nodes
-        path = Path.live_nodes
-        nodes = live_nodes
-        file_contents = JSON.pretty_generate(nodes)
-        SafeFileWriter.write(path, file_contents)
-      end
-
-      def status(key)
-        Serf.status(key)
-      end
-
-      def save_status(key, value)
-        status = Serf.load_status
-        status[key] = value
-        SafeFileWriter.write(Serf.status_file, JSON.pretty_generate(status))
-      end
-
-      def delete_status(key)
-        status = Serf.load_status
-        status.delete(key)
-        SafeFileWriter.write(Serf.status_file, JSON.pretty_generate(status))
-      end
-
-      def log(message)
-        @response["log"] << message
+      def output_response(response)
+        puts JSON.generate(response)
       end
     end
   end
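The handler above no longer implements any cluster operation itself; it only maps a Serf event to a class under `Droonga::Command::Remote` (the new data/lib/droonga/command/remote.rb) and drives it through a small, uniform interface: `new(serf_name, payload)`, `should_process?`, `process`, and `response`. A minimal sketch of that implied contract, with a hypothetical `Example` command that is not part of the gem:

```ruby
# Sketch of the interface SerfEventHandler#run expects from classes in
# droonga/command/remote.rb. "Example" is hypothetical; the real classes are
# Remote::ChangeRole, Remote::Join, Remote::AbsorbData, and so on.
require "json"

module Droonga
  module Command
    module Remote
      class Example
        attr_reader :response

        def initialize(serf_name, payload)
          @serf_name = serf_name        # value of ENV["SERF_SELF_NAME"]
          @payload   = payload          # JSON payload read from $stdin
          @response  = {"log" => []}
        end

        # Mirrors the removed node check: skip events addressed to other nodes.
        def should_process?
          @payload.nil? or
            @payload["node"].nil? or
            @payload["node"] == @serf_name
        end

        def process
          @response["log"] << "#{@serf_name}: processed #{@payload.inspect}"
        end
      end
    end
  end
end

# The handler then does, in effect:
command = Droonga::Command::Remote::Example.new("127.0.0.1:7946",
                                                {"node" => "127.0.0.1:7946"})
command.process if command.should_process?
puts JSON.generate(command.response)
```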
data/lib/droonga/dispatcher.rb
CHANGED
@@ -69,7 +69,20 @@
       @farm.start
     end

-    def
+    def stop_gracefully(&on_stop)
+      logger.trace("stop_gracefully: start")
+      @collector_runners.each_value do |collector_runner|
+        collector_runner.shutdown
+      end
+      @adapter_runners.each_value do |adapter_runner|
+        adapter_runner.shutdown
+      end
+      @farm.stop_gracefully(&on_stop)
+      logger.trace("stop_gracefully: done")
+    end
+
+    def stop_immediately
+      logger.trace("stop_immediately: start")
       @collector_runners.each_value do |collector_runner|
         collector_runner.shutdown
       end
@@ -77,6 +90,7 @@ module Droonga
         adapter_runner.shutdown
       end
       @farm.shutdown
+      logger.trace("stop_immediately: done")
     end

     def process_message(message)
data/lib/droonga/engine.rb
CHANGED
@@ -57,14 +57,19 @@ module Droonga
       logger.trace("stop_gracefully: start")
       @live_nodes_list_observer.stop
       on_finish = lambda do
+        logger.trace("stop_gracefully/on_finish: start")
         output_last_processed_timestamp
-        @dispatcher.
-
-
+        @dispatcher.stop_gracefully do
+          @state.shutdown
+          yield
+        end
+        logger.trace("stop_gracefully/on_finish: done")
       end
       if @state.have_session?
+        logger.trace("stop_gracefully/having sessions")
         @state.on_finish = on_finish
       else
+        logger.trace("stop_gracefully/no session")
         on_finish.call
       end
       logger.trace("stop_gracefully: done")
@@ -75,7 +80,7 @@ module Droonga
       logger.trace("stop_immediately: start")
       output_last_processed_timestamp
       @live_nodes_list_observer.stop
-      @dispatcher.
+      @dispatcher.stop_immediately
       @state.shutdown
       logger.trace("stop_immediately: done")
     end
@@ -112,11 +117,13 @@ module Droonga
     end

     def output_last_processed_timestamp
+      logger.trace("output_last_processed_timestamp: start")
       path = Path.last_processed_timestamp
       FileUtils.mkdir_p(path.dirname.to_s)
       path.open("w") do |file|
         file.write(@last_processed_timestamp)
       end
+      logger.trace("output_last_processed_timestamp: done")
     end

     def effective_message?(message)
data/lib/droonga/engine_state.rb
CHANGED
@@ -90,6 +90,7 @@ module Droonga

     def register_session(id, session)
       @sessions[id] = session
+      logger.trace("new session #{id} is registered. rest sessions=#{@sessions.size}")
     end

     def unregister_session(id)
@@ -97,6 +98,7 @@ module Droonga
       unless have_session?
         @on_finish.call if @on_finish
       end
+      logger.trace("session #{id} is unregistered. rest sessions=#{@sessions.size}")
     end

     def have_session?
data/lib/droonga/farm.rb
CHANGED
@@ -49,14 +49,23 @@ module Droonga
       end
     end

-    def
-
+    def stop_gracefully
+      n_slices = @slices.size
+      n_done_slices = 0
       @slices.each_value do |slice|
-
-
+        slice.stop_gracefully do
+          n_done_slices += 1
+          if n_done_slices == n_slices
+            yield if block_given?
+          end
         end
       end
-
+    end
+
+    def stop_immediately
+      @slices.each_value do |slice|
+        slice.stop_immediately
+      end
     end

     def process(slice_name, message)
|
@@ -43,6 +43,7 @@ module Droonga
|
|
43
43
|
def stop_gracefully
|
44
44
|
logger.trace("stop_gracefully: start")
|
45
45
|
shutdown_heartbeat_receiver
|
46
|
+
logger.trace("stop_gracefully: middle")
|
46
47
|
shutdown_server
|
47
48
|
logger.trace("stop_gracefully: done")
|
48
49
|
end
|
@@ -54,6 +55,12 @@ module Droonga
|
|
54
55
|
logger.trace("stop_immediately: done")
|
55
56
|
end
|
56
57
|
|
58
|
+
def shutdown_clients
|
59
|
+
@clients.dup.each do |client|
|
60
|
+
client.close
|
61
|
+
end
|
62
|
+
end
|
63
|
+
|
57
64
|
private
|
58
65
|
def start_heartbeat_receiver
|
59
66
|
logger.trace("start_heartbeat_receiver: start")
|
@@ -76,6 +83,9 @@ module Droonga
|
|
76
83
|
client = Client.new(connection) do |tag, time, record|
|
77
84
|
@on_message.call(tag, time, record)
|
78
85
|
end
|
86
|
+
client.on_close = lambda do
|
87
|
+
@clients.delete(client)
|
88
|
+
end
|
79
89
|
@clients << client
|
80
90
|
end
|
81
91
|
@loop.attach(@server)
|
@@ -88,13 +98,9 @@ module Droonga
|
|
88
98
|
end
|
89
99
|
|
90
100
|
def shutdown_server
|
101
|
+
logger.trace("shutdown_server: start")
|
91
102
|
@server.close
|
92
|
-
|
93
|
-
|
94
|
-
def shutdown_clients
|
95
|
-
@clients.each do |client|
|
96
|
-
client.close
|
97
|
-
end
|
103
|
+
logger.trace("shutdown_server: done")
|
98
104
|
end
|
99
105
|
|
100
106
|
def log_tag
|
@@ -152,16 +158,27 @@ module Droonga
|
|
152
158
|
class Client
|
153
159
|
include Loggable
|
154
160
|
|
161
|
+
attr_accessor :on_close
|
155
162
|
def initialize(io, &on_message)
|
156
163
|
@io = io
|
157
164
|
@on_message = on_message
|
165
|
+
@on_close = nil
|
158
166
|
@unpacker = MessagePack::Unpacker.new
|
167
|
+
|
159
168
|
on_read = lambda do |data|
|
160
169
|
feed(data)
|
161
170
|
end
|
162
171
|
@io.on_read do |data|
|
163
172
|
on_read.call(data)
|
164
173
|
end
|
174
|
+
|
175
|
+
on_close = lambda do
|
176
|
+
@io = nil
|
177
|
+
@on_close.call if @on_close
|
178
|
+
end
|
179
|
+
@io.on_close do
|
180
|
+
on_close.call
|
181
|
+
end
|
165
182
|
end
|
166
183
|
|
167
184
|
def close
|
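Two details of the receiver change above are easy to miss: a client now deregisters itself from `@clients` through the new `on_close` hook, and `shutdown_clients` iterates over `@clients.dup` because each `close` mutates the original array. A small stand-alone illustration of why the `dup` matters (`FakeClient` is hypothetical):

```ruby
# Closing a client triggers its on_close hook, which deletes it from the
# shared list; iterating a snapshot (dup) avoids skipping elements while the
# original array shrinks underneath the iteration.
class FakeClient
  def initialize(registry)
    @registry = registry
  end

  def close
    @registry.delete(self)  # same effect as the on_close hook in the diff
  end
end

clients = []
3.times { clients << FakeClient.new(clients) }

clients.dup.each(&:close)  # iterate a snapshot, mutate the original
puts clients.size          # => 0: every client was closed and deregistered
```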
data/lib/droonga/fluent_message_sender.rb
CHANGED
@@ -34,6 +34,7 @@ module Droonga
       @host = host
       @port = port
       @socket = nil
+      @packer = MessagePack::Packer.new
       @buffering = options[:buffering]
     end

@@ -114,7 +115,10 @@

     def create_packed_fluent_message(tag, data)
       fluent_message = [tag, Time.now.to_i, data]
-
+      @packer.pack(fluent_message)
+      packed_fluent_message = @packer.to_s
+      @packer.clear
+      packed_fluent_message
     end

     def log_tag
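`create_packed_fluent_message` now reuses a single packer held in `@packer` instead of serializing each message into a fresh buffer. A small sketch of that pack / to_s / clear cycle with the msgpack gem:

```ruby
# Reuse one MessagePack::Packer across messages: pack into the shared buffer,
# copy the bytes out, then clear the buffer for the next message.
require "msgpack"

def create_packed_fluent_message(packer, tag, data)
  fluent_message = [tag, Time.now.to_i, data]
  packer.pack(fluent_message)
  packed_fluent_message = packer.to_s
  packer.clear
  packed_fluent_message
end

packer = MessagePack::Packer.new
bytes = create_packed_fluent_message(packer, "droonga.message", {"type" => "search"})
p MessagePack.unpack(bytes)  # => ["droonga.message", <unix time>, {"type"=>"search"}]
```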
data/lib/droonga/node_status.rb
ADDED
@@ -0,0 +1,67 @@
+# Copyright (C) 2014 Droonga Project
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License version 2.1 as published by the Free Software Foundation.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+
+require "json"
+require "droonga/path"
+require "droonga/safe_file_writer"
+
+module Droonga
+  class NodeStatus
+    def initialize
+      @status = load
+    end
+
+    def have?(key)
+      key = normalize_key(key)
+      @status.include?(key)
+    end
+
+    def get(key)
+      key = normalize_key(key)
+      @status[key]
+    end
+
+    def set(key, value)
+      key = normalize_key(key)
+      @status[key] = value
+      SafeFileWriter.write(status_file, JSON.pretty_generate(@status))
+    end
+
+    def delete(key)
+      key = normalize_key(key)
+      @status.delete(key)
+      SafeFileWriter.write(status_file, JSON.pretty_generate(@status))
+    end
+
+    private
+    def normalize_key(key)
+      key.to_sym
+    end
+
+    def status_file
+      @status_file ||= Path.state + "status_file"
+    end
+
+    def load
+      if status_file.exist?
+        contents = status_file.read
+        unless contents.empty?
+          return JSON.parse(contents, :symbolize_names => true)
+        end
+      end
+      {}
+    end
+  end
+end
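The new `NodeStatus` class replaces the `save_status`/`delete_status`/`status` helpers removed from the Serf event handler above, persisting a small JSON hash under the engine's state directory via `SafeFileWriter`. A usage sketch, assuming droonga-engine 1.0.6 is installed and its base directory (`Path.base`) is writable:

```ruby
# Keys are normalized to symbols, so string and symbol lookups are equivalent.
require "droonga/node_status"

status = Droonga::NodeStatus.new
status.set(:absorbing, true)       # written out as pretty-printed JSON
puts status.have?(:absorbing)      # => true
puts status.get("absorbing")       # => true
status.delete(:absorbing)

# A fresh instance re-reads the persisted file (Path.state + "status_file").
puts Droonga::NodeStatus.new.have?(:absorbing)  # => false
```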