droonga-engine 1.0.3 → 1.0.4
This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/Gemfile +7 -0
- data/bin/droonga-engine-absorb-data +82 -0
- data/bin/droonga-engine-catalog-generate +16 -13
- data/bin/droonga-engine-catalog-modify +108 -0
- data/bin/droonga-engine-join +115 -0
- data/bin/droonga-engine-unjoin +90 -0
- data/doc/text/news.md +8 -0
- data/droonga-engine.gemspec +2 -1
- data/lib/droonga/buffered_tcp_socket.rb +132 -0
- data/lib/droonga/catalog_generator.rb +87 -4
- data/lib/droonga/command/droonga_engine.rb +27 -7
- data/lib/droonga/command/droonga_engine_service.rb +3 -2
- data/lib/droonga/command/serf_event_handler.rb +211 -14
- data/lib/droonga/data_absorber.rb +55 -0
- data/lib/droonga/dispatcher.rb +25 -11
- data/lib/droonga/engine/version.rb +1 -1
- data/lib/droonga/engine.rb +24 -24
- data/lib/droonga/engine_state.rb +23 -0
- data/lib/droonga/{catalog_observer.rb → file_observer.rb} +12 -7
- data/lib/droonga/fluent_message_sender.rb +24 -37
- data/lib/droonga/forwarder.rb +30 -5
- data/lib/droonga/handler_messenger.rb +3 -2
- data/lib/droonga/handler_runner.rb +29 -16
- data/lib/droonga/job_pusher.rb +12 -0
- data/lib/droonga/line_buffer.rb +42 -0
- data/lib/droonga/logger.rb +10 -6
- data/lib/droonga/path.rb +16 -0
- data/lib/droonga/plugins/search/distributed_search_planner.rb +1 -1
- data/lib/droonga/plugins/system.rb +50 -0
- data/lib/droonga/processor.rb +9 -4
- data/lib/droonga/safe_file_writer.rb +39 -0
- data/lib/droonga/serf.rb +212 -14
- data/lib/droonga/test/stub_handler_messenger.rb +3 -0
- data/lib/droonga/worker.rb +6 -1
- data/test/command/config/default/catalog.json +1 -1
- data/test/command/config/version1/catalog.json +2 -2
- data/test/command/suite/system/status.expected +12 -0
- data/test/command/suite/system/status.test +5 -0
- data/test/unit/plugins/system/test_status.rb +79 -0
- data/test/unit/test_catalog_generator.rb +1 -1
- data/test/unit/test_line_buffer.rb +62 -0
- metadata +46 -12
- data/lib/droonga/live_nodes_list_observer.rb +0 -72
data/lib/droonga/catalog_generator.rb
CHANGED
@@ -20,10 +20,23 @@ module Droonga
     DEFAULT_DATASET = "Default"
     DEFAULT_HOSTS = ["127.0.0.1"]
     DEFAULT_N_WORKERS = 4
-
+    DEFAULT_N_SLICES = 1
+    DEFAULT_PLUGINS = ["groonga", "search", "crud", "dump", "system"]
     DEFAULT_PORT = 10031
     DEFAULT_TAG = "droonga"
 
+    attr_reader :datasets
+
+    class << self
+      def generate(datasets_params)
+        generator = new
+        datasets_params.each do |name, params|
+          generator.add_dataset(name, params)
+        end
+        generator.generate
+      end
+    end
+
     def initialize
       @version = 2
       @effective_date = Time.now
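The new class-level `generate` entry point builds a complete catalog from a hash of per-dataset parameters in one call. A minimal sketch of how it could be driven, assuming the droonga-engine gem is installed; the dataset name and parameter keys below are only illustrative:

```ruby
# Minimal sketch (assumes droonga-engine is installed; values are illustrative).
require "json"
require "droonga/catalog_generator"

catalog = Droonga::CatalogGenerator.generate(
  "Default" => {
    :hosts    => ["192.168.100.50", "192.168.100.51"],
    :n_slices => 1,
  }
)
puts JSON.pretty_generate(catalog)
```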
@@ -42,6 +55,44 @@ module Droonga
       }
     end
 
+    def load(catalog)
+      catalog["datasets"].each do |name, dataset|
+        add_dataset(name, dataset_to_params(dataset))
+      end
+      self
+    end
+
+    def dataset_for_host(host)
+      @datasets.each do |name, dataset|
+        if dataset.replicas.hosts.include?(host)
+          return dataset
+        end
+      end
+      nil
+    end
+
+    def modify(dataset_modifications)
+      dataset_modifications.each do |name, modification|
+        dataset = @datasets[name]
+        next unless dataset
+
+        replicas = dataset.replicas
+
+        if modification[:replica_hosts]
+          replicas.hosts = modification[:replica_hosts]
+        end
+
+        if modification[:add_replica_hosts]
+          replicas.hosts += modification[:add_replica_hosts]
+          replicas.hosts.uniq!
+        end
+
+        if modification[:remove_replica_hosts]
+          replicas.hosts -= modification[:remove_replica_hosts]
+        end
+      end
+    end
+
     private
     def catalog_datasets
       catalog_datasets = {}
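`load`, `dataset_for_host`, and `modify` let callers such as the Serf event handler further down round-trip an existing catalog.json through the generator: parse it, adjust a replica host list, and re-serialize it. A hedged sketch of that flow, with the file path and host purely as placeholders:

```ruby
# Hedged sketch of the load -> modify -> generate round trip (placeholder path and host).
require "json"
require "droonga/catalog_generator"

generator = Droonga::CatalogGenerator.new
generator.load(JSON.parse(File.read("catalog.json")))

generator.modify("Default" => {:add_replica_hosts => ["192.168.100.52"]})

File.write("catalog.json", JSON.pretty_generate(generator.generate))
```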
@@ -52,6 +103,8 @@ module Droonga
     end
 
     class Dataset
+      attr_reader :name
+
       def initialize(name, options)
         @name = name
         @options = options
@@ -75,7 +128,7 @@ module Droonga
 
       def replicas
         return @options[:replicas] if @options[:replicas]
-        @generated_replicas ||= Replicas.new(@options)
+        @generated_replicas ||= Replicas.new(@options)
       end
 
       def to_catalog
@@ -83,7 +136,7 @@ module Droonga
           "nWorkers" => n_workers,
           "plugins" => plugins,
           "schema" => schema,
-          "replicas" => replicas,
+          "replicas" => replicas.to_json,
         }
         catalog["fact"] = fact if fact
         catalog
@@ -93,6 +146,9 @@ module Droonga
     end
 
     class Replicas
+      attr_accessor :hosts
+      attr_reader :port, :tag, :n_slices
+
       def initialize(options={})
         @hosts = options[:hosts] || DEFAULT_HOSTS
         @port = options[:port]
@@ -122,7 +178,7 @@ module Droonga
         @host = host
         @port = options[:port] || DEFAULT_PORT
         @tag = options[:tag] || DEFAULT_TAG
-        @n_slices = options[:n_slices] ||
+        @n_slices = options[:n_slices] || DEFAULT_N_SLICES
 
         @n_volumes = 0
       end
@@ -159,5 +215,32 @@ module Droonga
         @weight ||= 100 / @n_slices
       end
     end
+
+    ADDRESS_MATCHER = /\A(.*):(\d+)\/([^\.]+)\.(.+)\z/
+
+    def dataset_to_params(dataset)
+      params = {}
+      params[:n_workers] = dataset["nWorkers"]
+      params[:n_slices] = dataset["replicas"].first["slices"].size
+      params[:plugins] = dataset["plugins"]
+      params[:schema] = dataset["schema"] if dataset["schema"]
+      params[:fact] = dataset["fact"] if dataset["fact"]
+
+      nodes = dataset["replicas"].collect do |replica|
+        ADDRESS_MATCHER =~ replica["slices"].first["volume"]["address"]
+        {
+          :host => $1,
+          :port => $2.to_i,
+          :tag => $3,
+          :path => $4,
+        }
+      end
+      params[:tag] = nodes.first[:tag]
+      params[:port] = nodes.first[:port].to_i
+      params[:hosts] = nodes.collect do |node|
+        node[:host]
+      end
+      params
+    end
   end
 end
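`ADDRESS_MATCHER` splits a volume address of the form `host:port/tag.path` into the four captures consumed by `dataset_to_params`. In plain Ruby, with an illustrative address value:

```ruby
ADDRESS_MATCHER = /\A(.*):(\d+)\/([^\.]+)\.(.+)\z/

ADDRESS_MATCHER =~ "127.0.0.1:10031/droonga.000"  # illustrative address
p $1  # => "127.0.0.1"  (host)
p $2  # => "10031"      (port; dataset_to_params converts it with to_i)
p $3  # => "droonga"    (tag)
p $4  # => "000"        (path)
```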
data/lib/droonga/command/droonga_engine.rb
CHANGED
@@ -19,11 +19,13 @@ require "ipaddr"
 require "fileutils"
 
 require "coolio"
+require "sigdump"
 
 require "droonga/path"
 require "droonga/serf"
-require "droonga/
+require "droonga/file_observer"
 require "droonga/service_control_protocol"
+require "droonga/line_buffer"
 
 module Droonga
   module Command
@@ -130,7 +132,7 @@ module Droonga
       end
 
       def log_level
-        ENV["DROONGA_LOG_LEVEL"] || Logger::Level.
+        ENV["DROONGA_LOG_LEVEL"] || Logger::Level.default
       end
 
       def daemon?
@@ -235,6 +237,7 @@ module Droonga
 
       def run
         @serf = run_serf
+        @serf_status_observer = run_serf_status_observer
        @service_runner = run_service
         @catalog_observer = run_catalog_observer
         @loop_breaker = Coolio::AsyncWatcher.new
@@ -267,12 +270,16 @@ module Droonga
         trap(:HUP) do
           restart_immediately
         end
+        trap(:USR2) do
+          Sigdump.dump
+        end
       end
 
       def stop_gracefully
         @loop_breaker.signal
         @loop_breaker.detach
         @serf.shutdown
+        @serf_status_observer.stop
         @catalog_observer.stop
         @service_runner.stop_gracefully
       end
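Together with the new `sigdump` dependency, the `USR2` trap lets an operator capture thread backtraces from a running droonga-engine process without stopping it (sigdump typically writes the dump to a file under /tmp named after the pid). A hedged example of triggering it from Ruby, with a placeholder pid:

```ruby
# Hedged example: ask a running droonga-engine process to run Sigdump.dump.
engine_pid = 12345                # placeholder; use the real droonga-engine pid
Process.kill("USR2", engine_pid)  # handled by the trap(:USR2) shown above
```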
@@ -281,6 +288,7 @@ module Droonga
         @loop_breaker.signal
         @loop_breaker.detach
         @serf.shutdown
+        @serf_status_observer.stop
         @catalog_observer.stop
         @service_runner.stop_immediately
       end
@@ -290,7 +298,6 @@ module Droonga
         old_service_runner = @service_runner
         @service_runner = run_service
         @service_runner.on_ready = lambda do
-          @serf.restart if @serf.running?
           @service_runner.on_failure = nil
           old_service_runner.stop_gracefully
         end
@@ -304,7 +311,6 @@ module Droonga
         @loop_breaker.signal
         old_service_runner = @service_runner
         @service_runner = run_service
-        @serf.restart if @serf.running?
         old_service_runner.stop_immediately
       end
 
@@ -320,8 +326,22 @@ module Droonga
         serf
       end
 
+      def restart_serf
+        @serf.shutdown if @serf
+        @serf = run_serf
+      end
+
+      def run_serf_status_observer
+        serf_status_observer = FileObserver.new(@loop, Serf.status_file)
+        serf_status_observer.on_change = lambda do
+          restart_serf
+        end
+        serf_status_observer.start
+        serf_status_observer
+      end
+
       def run_catalog_observer
-        catalog_observer =
+        catalog_observer = FileObserver.new(@loop, Path.catalog)
         catalog_observer.on_change = lambda do
           restart_graceful
         end
@@ -414,9 +434,9 @@ module Droonga
 
       def attach_control_read_in(control_read_in)
         @control_read_in = Coolio::IO.new(control_read_in)
+        line_buffer = LineBuffer.new
         on_read = lambda do |data|
-
-          data.each_line do |line|
+          line_buffer.feed(data) do |line|
             case line
             when Messages::READY
               on_ready
data/lib/droonga/command/droonga_engine_service.rb
CHANGED
@@ -18,6 +18,7 @@ require "optparse"
 require "coolio"
 
 require "droonga/service_control_protocol"
+require "droonga/line_buffer"
 require "droonga/engine"
 require "droonga/fluent_message_receiver"
 require "droonga/internal_fluent_message_receiver"
@@ -162,8 +163,8 @@ module Droonga
         @control_read = Coolio::IO.new(IO.new(@control_read_fd))
         @control_read_fd = nil
         on_read = lambda do |data|
-
-          data
+          line_buffer = LineBuffer.new
+          line_buffer.feed(data) do |line|
             case line
             when Messages::STOP_GRACEFUL
               stop_gracefully
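Both call sites above now push raw reads through `LineBuffer#feed` instead of `String#each_line`, so a control message split across two reads is only acted on once the full line has arrived. `line_buffer.rb` itself (+42 lines) is not shown in this diff excerpt; conceptually it behaves roughly like the following hypothetical sketch, which may differ from the real implementation:

```ruby
# Hypothetical sketch of the LineBuffer idea; the real droonga/line_buffer.rb may differ.
class LineBufferSketch
  def initialize
    @buffer = ""
  end

  # Append newly read data and yield each complete line; a trailing partial
  # line stays buffered until the next feed.
  def feed(data)
    @buffer << data
    while (newline_index = @buffer.index("\n"))
      yield(@buffer.slice!(0, newline_index + 1))
    end
  end
end

buffer = LineBufferSketch.new
buffer.feed("first li") {|line| p line }            # nothing yielded yet
buffer.feed("ne\nsecond line\n") {|line| p line }   # yields "first line\n", then "second line\n"
```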
data/lib/droonga/command/serf_event_handler.rb
CHANGED
@@ -13,15 +13,13 @@
 # License along with this library; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-require "optparse"
-require "pathname"
 require "json"
-require "fileutils"
-require "tempfile"
 
 require "droonga/path"
 require "droonga/serf"
-require "droonga/
+require "droonga/catalog_generator"
+require "droonga/data_absorber"
+require "droonga/safe_file_writer"
 
 module Droonga
   module Command
@@ -35,11 +33,14 @@ module Droonga
       def initialize
         @serf = ENV["SERF"] || Serf.path
         @serf_rpc_address = ENV["SERF_RPC_ADDRESS"] || "127.0.0.1:7373"
+        @serf_name = ENV["SERF_SELF_NAME"]
       end
 
       def run
         parse_event
+        return true unless event_for_me?
 
+        process_event
         output_live_nodes
         true
       end
@@ -47,14 +48,209 @@ module Droonga
       private
       def parse_event
         @event_name = ENV["SERF_EVENT"]
+        @payload = nil
         case @event_name
         when "user"
-          @
+          @event_sub_name = ENV["SERF_USER_EVENT"]
+          @payload = JSON.parse($stdin.gets)
+          puts "event sub name = #{@event_sub_name}"
         when "query"
-          @
+          @event_sub_name = ENV["SERF_QUERY_NAME"]
+          @payload = JSON.parse($stdin.gets)
+          puts "event sub name = #{@event_sub_name}"
         end
       end
 
+      def event_for_me?
+        return true unless @payload
+        return true unless @payload["node"]
+
+        @payload["node"] == @serf_name
+      end
+
+      def process_event
+        case @event_sub_name
+        when "change_role"
+          save_status(:role, @payload["role"])
+        when "join"
+          join
+        when "set_replicas"
+          set_replicas
+        when "add_replicas"
+          add_replicas
+        when "remove_replicas"
+          remove_replicas
+        when "absorb_data"
+          absorb_data
+        end
+      end
+
+      def host
+        @serf_name.split(":").first
+      end
+
+      def given_hosts
+        hosts = @payload["hosts"]
+        return nil unless hosts
+        hosts = [hosts] if hosts.is_a?(String)
+        hosts
+      end
+
+      def join
+        type = @payload["type"]
+        puts "type = #{type}"
+        case type
+        when "replica"
+          join_as_replica
+        end
+      end
+
+      def join_as_replica
+        source = @payload["source"]
+        return unless source
+
+        puts "source = #{source}"
+
+        generator = create_current_catalog_generator
+        dataset = generator.dataset_for_host(source) ||
+                    generator.dataset_for_host(host)
+        return unless dataset
+
+        dataset_name = dataset.name
+        tag = dataset.replicas.tag
+        port = dataset.replicas.port
+        other_hosts = dataset.replicas.hosts
+
+        puts "dataset = #{dataset_name}"
+        puts "port = #{port}"
+        puts "tag = #{tag}"
+
+        if @payload["copy"]
+          puts "starting to copy data from #{source}"
+
+          modify_catalog do |modifier|
+            modifier.datasets[dataset_name].replicas.hosts = [host]
+          end
+          sleep(1) # wait for restart
+
+          DataAbsorber.absorb(:dataset => dataset_name,
+                              :source_host => source,
+                              :destination_host => host,
+                              :port => port,
+                              :tag => tag)
+          sleep(1)
+        end
+
+        puts "joining to the cluster: update myself"
+
+        modify_catalog do |modifier|
+          modifier.datasets[dataset_name].replicas.hosts += other_hosts
+          modifier.datasets[dataset_name].replicas.hosts.uniq!
+        end
+        sleep(1) # wait for restart
+
+        puts "joining to the cluster: update others"
+
+        source_node = "#{source}:#{port}/#{tag}"
+        Serf.send_query(source_node, "add_replicas",
+                        "dataset" => dataset_name,
+                        "hosts" => [host])
+      end
+
+      def set_replicas
+        dataset = @payload["dataset"]
+        return unless dataset
+
+        hosts = given_hosts
+        return unless hosts
+
+        puts "new replicas: #{hosts.join(",")}"
+
+        modify_catalog do |modifier|
+          modifier.datasets[dataset].replicas.hosts = hosts
+        end
+      end
+
+      def add_replicas
+        dataset = @payload["dataset"]
+        return unless dataset
+
+        hosts = given_hosts
+        return unless hosts
+
+        hosts -= [host]
+        return if hosts.empty?
+
+        puts "adding replicas: #{hosts.join(",")}"
+
+        modify_catalog do |modifier|
+          modifier.datasets[dataset].replicas.hosts += hosts
+          modifier.datasets[dataset].replicas.hosts.uniq!
+        end
+      end
+
+      def remove_replicas
+        dataset = @payload["dataset"]
+        return unless dataset
+
+        hosts = given_hosts
+        return unless hosts
+
+        puts "removing replicas: #{hosts.join(",")}"
+
+        modify_catalog do |modifier|
+          modifier.datasets[dataset].replicas.hosts -= hosts
+        end
+      end
+
+      def modify_catalog
+        generator = create_current_catalog_generator
+        yield(generator)
+        SafeFileWriter.write(Path.catalog, JSON.pretty_generate(generator.generate))
+      end
+
+      def create_current_catalog_generator
+        current_catalog = JSON.parse(Path.catalog.read)
+        generator = CatalogGenerator.new
+        generator.load(current_catalog)
+      end
+
+      def absorb_data
+        return unless event_for_me?
+
+        source = @payload["source"]
+        return unless source
+
+        puts "start to absorb data from #{source}"
+
+        dataset_name = @payload["dataset"]
+        port = @payload["port"]
+        tag = @payload["tag"]
+
+        if dataset_name.nil? or port.nil? or tag.nil?
+          current_catalog = JSON.parse(Path.catalog.read)
+          generator = CatalogGenerator.new
+          generator.load(current_catalog)
+
+          dataset = generator.dataset_for_host(source)
+          return unless dataset
+
+          dataset_name = dataset.name
+          port = dataset.replicas.port
+          tag = dataset.replicas.tag
+        end
+
+        puts "dataset = #{dataset_name}"
+        puts "port = #{port}"
+        puts "tag = #{tag}"
+
+        DataAbsorber.absorb(:dataset => dataset_name,
+                            :source_host => source,
+                            :destination_host => host,
+                            :port => port,
+                            :tag => tag)
+      end
+
       def live_nodes
         nodes = {}
         members = `#{@serf} members -rpc-addr #{@serf_rpc_address}`
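The new handler methods are driven by Serf user events and queries whose JSON payload arrives on standard input; the optional `"node"` field lets `event_for_me?` skip events addressed to another member. `join_as_replica` above shows the in-tree sender, and a hedged sketch of the equivalent call for a `set_replicas` event looks like this (node name, dataset, and hosts are placeholders):

```ruby
# Hedged sketch: ask one node to rewrite its replica list via a Serf query,
# mirroring the Serf.send_query call used by join_as_replica above.
require "droonga/serf"

Droonga::Serf.send_query("192.168.100.50:10031/droonga", "set_replicas",
                         "node"    => "192.168.100.50:10031/droonga",
                         "dataset" => "Default",
                         "hosts"   => ["192.168.100.50", "192.168.100.51"])
```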
@@ -70,15 +266,16 @@ module Droonga
       end
 
       def output_live_nodes
-
+        path = Path.live_nodes
         nodes = live_nodes
         file_contents = JSON.pretty_generate(nodes)
-
-
-
-
-
-
+        SafeFileWriter.write(path, file_contents)
+      end
+
+      def save_status(key, value)
+        status = Serf.load_status
+        status[key] = value
+        SafeFileWriter.write(Serf.status_file, JSON.pretty_generate(status))
       end
     end
   end
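`output_live_nodes` and the new `save_status` both delegate the actual write to `SafeFileWriter` (a new 39-line file not included in this excerpt), so the watched files are never observed half-written. The usual way to get that guarantee is to write to a temporary file and rename it into place; a hypothetical sketch of the pattern in plain Ruby:

```ruby
# Hypothetical sketch of an atomic "safe write"; the real droonga/safe_file_writer.rb may differ.
require "fileutils"

def safe_write(path, contents)
  path = File.expand_path(path)
  FileUtils.mkdir_p(File.dirname(path))
  temporary_path = "#{path}.new"
  File.open(temporary_path, "w") do |output|
    output.write(contents)
    output.flush
  end
  File.rename(temporary_path, path)  # readers see the old or the new file, never a partial one
end

safe_write("live-nodes.json", %Q({"nodes": {}}\n))  # placeholder path and contents
```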
data/lib/droonga/data_absorber.rb
ADDED
@@ -0,0 +1,55 @@
+# Copyright (C) 2014 Droonga Project
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License version 2.1 as published by the Free Software Foundation.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+require "open3"
+
+module Droonga
+  class DataAbsorber
+    class << self
+      def absorb(params)
+        drndump = params[:drndump] || "drndump"
+        drndump_options = []
+        drndump_options += ["--host", params[:source_host]] if params[:source_host]
+        drndump_options += ["--port", params[:port].to_s] if params[:port]
+        drndump_options += ["--tag", params[:tag]] if params[:tag]
+        drndump_options += ["--dataset", params[:dataset]] if params[:dataset]
+        drndump_options += ["--receiver-host", params[:destination_host]]
+        drndump_options += ["--receiver-port", params[:receiver_port].to_s] if params[:receiver_port]
+
+        client = params[:client] || "droonga-request"
+        client_options = []
+        client_options += ["--host", params[:destination_host]]
+        client_options += ["--port", params[:port].to_s] if params[:port]
+        client_options += ["--tag", params[:tag]] if params[:tag]
+        client_options += ["--receiver-host", params[:destination_host]]
+        client_options += ["--receiver-port", params[:receiver_port].to_s] if params[:receiver_port]
+
+        drndump_command_line = [drndump] + drndump_options
+        client_command_line = [client] + client_options
+
+        Open3.popen3(*drndump_command_line) do |dump_in, dump_out, dump_error, dump_thread|
+          dump_in.close
+          Open3.popen3(*client_command_line) do |client_in, client_out, client_error, client_thread|
+            client_out.close
+            dump_out.each do |dump|
+              yield dump if block_given?
+              client_in.puts(dump)
+            end
+          end
+        end
+      end
+    end
+  end
+end
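`DataAbsorber.absorb` simply pipes the output of the external `drndump` command into `droonga-request`, yielding each dumped message to the caller when a block is given. A usage sketch mirroring the calls in serf_event_handler.rb above; the hosts and dataset are placeholders and both command-line tools must be installed:

```ruby
# Usage sketch (assumes drndump and droonga-request are available in PATH).
require "droonga/data_absorber"

Droonga::DataAbsorber.absorb(:dataset          => "Default",
                             :source_host      => "192.168.100.50",
                             :destination_host => "192.168.100.51",
                             :port             => 10031,
                             :tag              => "droonga") do |dump|
  puts "forwarding: #{dump}"  # each dumped message passes through here
end
```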
data/lib/droonga/dispatcher.rb
CHANGED
@@ -46,19 +46,20 @@ module Droonga
       end
     end
 
-
+    attr_reader :engine_state
 
     def initialize(engine_state, catalog)
      @engine_state = engine_state
+      @forwarder = @engine_state.forwarder
+      @replier = @engine_state.replier
       @catalog = catalog
-      @live_nodes = catalog.all_nodes
       @adapter_runners = create_adapter_runners
       @farm = Farm.new(@engine_state.name, @catalog, @engine_state.loop,
-                       :
+                       :engine_state => @engine_state,
+                       :dispatcher => self,
+                       :forwarder => @forwarder)
       @collector_runners = create_collector_runners
       @step_runners = create_step_runners
-      @forwarder = @engine_state.forwarder
-      @replier = @engine_state.replier
     end
 
     def start
@@ -169,22 +170,23 @@ module Droonga
 
     def dispatch_steps(steps)
       id = @engine_state.generate_id
-
+
+      destinations = []
       steps.each do |step|
         dataset = @catalog.dataset(step["dataset"])
         if dataset
-          routes = dataset.get_routes(step, @live_nodes)
+          routes = dataset.get_routes(step, @engine_state.live_nodes)
           step["routes"] = routes
         else
           step["routes"] ||= [id]
         end
-
-
-          destinations[farm_path(route)] = true
+        destinations += step["routes"].collect do |route|
+          farm_path(route)
         end
       end
+
       dispatch_message = { "id" => id, "steps" => steps }
-      destinations.
+      destinations.uniq.each do |destination|
         dispatch(dispatch_message, destination)
       end
     end
@@ -210,6 +212,18 @@ module Droonga
       @engine_state.local_route?(route)
     end
 
+    def write_step?(step)
+      return false unless step["dataset"]
+
+      step_runner = @step_runners[step["dataset"]]
+      return false unless step_runner
+
+      step_definition = step_runner.find(step["command"])
+      return false unless step_definition
+
+      step_definition.write?
+    end
+
     private
     def farm_path(route)
       @engine_state.farm_path(route)
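`write_step?` asks the plugin's step definition whether a command modifies data, which callers can use to treat read and write requests differently. A hedged sketch of a plugin step that would report `write?` as true; the plugin DSL calls follow the style of droonga's bundled plugins, but the names here are purely illustrative:

```ruby
# Hedged sketch of a plugin step flagged as a write (illustrative names;
# the DSL usage is an assumption based on droonga's bundled plugins).
require "droonga/plugin"

module Droonga
  module Plugins
    module ExamplePlugin
      extend Plugin
      register("example")

      define_single_step do |step|
        step.name = "example-update"  # command name looked up by step_runner.find
        step.write = true             # reported back through step_definition.write?
      end
    end
  end
end
```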