logstash-core 6.0.0.alpha1-java → 6.0.0.alpha2-java

Files changed (75)
  1. checksums.yaml +4 -4
  2. data/lib/logstash-core/logstash-core.jar +0 -0
  3. data/lib/logstash-core/version.rb +1 -1
  4. data/lib/logstash/agent.rb +81 -45
  5. data/lib/logstash/api/commands/hot_threads_reporter.rb +3 -3
  6. data/lib/logstash/api/commands/node.rb +13 -6
  7. data/lib/logstash/api/commands/stats.rb +18 -6
  8. data/lib/logstash/api/modules/node.rb +7 -0
  9. data/lib/logstash/api/modules/node_stats.rb +12 -5
  10. data/lib/logstash/bootstrap_check/default_config.rb +3 -7
  11. data/lib/logstash/compiler.rb +33 -15
  12. data/lib/logstash/compiler/lscl.rb +16 -8
  13. data/lib/logstash/config/mixin.rb +5 -42
  14. data/lib/logstash/config/pipeline_config.rb +1 -1
  15. data/lib/logstash/config/source/local.rb +28 -13
  16. data/lib/logstash/config/source/multi_local.rb +72 -0
  17. data/lib/logstash/config/source_loader.rb +1 -2
  18. data/lib/logstash/environment.rb +12 -3
  19. data/lib/logstash/execution_context.rb +7 -3
  20. data/lib/logstash/inputs/base.rb +2 -0
  21. data/lib/logstash/instrument/metric_type.rb +0 -2
  22. data/lib/logstash/instrument/periodic_poller/jvm.rb +5 -5
  23. data/lib/logstash/instrument/periodic_poller/pq.rb +1 -1
  24. data/lib/logstash/outputs/base.rb +2 -0
  25. data/lib/logstash/pipeline.rb +31 -14
  26. data/lib/logstash/pipeline_action/create.rb +1 -2
  27. data/lib/logstash/pipeline_action/reload.rb +2 -1
  28. data/lib/logstash/pipeline_settings.rb +50 -0
  29. data/lib/logstash/plugin.rb +1 -0
  30. data/lib/logstash/runner.rb +7 -5
  31. data/lib/logstash/settings.rb +11 -3
  32. data/lib/logstash/shutdown_watcher.rb +26 -0
  33. data/lib/logstash/state_resolver.rb +1 -3
  34. data/lib/logstash/util/dead_letter_queue_manager.rb +61 -0
  35. data/lib/logstash/util/environment_variables.rb +43 -0
  36. data/lib/logstash/util/thread_dump.rb +3 -1
  37. data/lib/logstash/version.rb +1 -1
  38. data/locales/en.yml +4 -0
  39. data/logstash-core.gemspec +4 -1
  40. data/spec/logstash/agent/converge_spec.rb +36 -35
  41. data/spec/logstash/agent_spec.rb +48 -177
  42. data/spec/{api/lib/commands/stats.rb → logstash/api/commands/stats_spec.rb} +7 -2
  43. data/spec/{api/lib → logstash/api}/errors_spec.rb +1 -1
  44. data/spec/{api/lib/api → logstash/api/modules}/logging_spec.rb +1 -10
  45. data/spec/{api/lib/api → logstash/api/modules}/node_plugins_spec.rb +1 -2
  46. data/spec/{api/lib/api → logstash/api/modules}/node_spec.rb +9 -8
  47. data/spec/{api/lib/api → logstash/api/modules}/node_stats_spec.rb +11 -9
  48. data/spec/{api/lib/api → logstash/api/modules}/plugins_spec.rb +4 -3
  49. data/spec/{api/lib/api → logstash/api/modules}/root_spec.rb +2 -2
  50. data/spec/{api/lib → logstash/api}/rack_app_spec.rb +0 -0
  51. data/spec/logstash/compiler/compiler_spec.rb +72 -9
  52. data/spec/logstash/config/source/local_spec.rb +20 -4
  53. data/spec/logstash/config/source/multi_local_spec.rb +113 -0
  54. data/spec/logstash/execution_context_spec.rb +14 -4
  55. data/spec/logstash/inputs/base_spec.rb +1 -1
  56. data/spec/logstash/instrument/wrapped_write_client_spec.rb +34 -19
  57. data/spec/logstash/output_delegator_spec.rb +1 -1
  58. data/spec/logstash/outputs/base_spec.rb +1 -1
  59. data/spec/logstash/pipeline_action/reload_spec.rb +1 -1
  60. data/spec/logstash/pipeline_action/stop_spec.rb +1 -1
  61. data/spec/logstash/pipeline_dlq_commit_spec.rb +107 -0
  62. data/spec/logstash/pipeline_pq_file_spec.rb +3 -1
  63. data/spec/logstash/pipeline_reporter_spec.rb +2 -1
  64. data/spec/logstash/pipeline_spec.rb +54 -43
  65. data/spec/logstash/runner_spec.rb +27 -36
  66. data/spec/logstash/settings/array_coercible_spec.rb +65 -0
  67. data/spec/logstash/settings_spec.rb +91 -0
  68. data/spec/logstash/shutdown_watcher_spec.rb +10 -16
  69. data/spec/logstash/state_resolver_spec.rb +6 -4
  70. data/spec/support/helpers.rb +16 -3
  71. data/spec/support/shared_contexts.rb +26 -2
  72. metadata +42 -39
  73. data/lib/logstash/instrument/metric_type/mean.rb +0 -33
  74. data/spec/api/lib/api/support/resource_dsl_methods.rb +0 -87
  75. data/spec/api/spec_helper.rb +0 -106
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
-  metadata.gz: 81ae0f7af1f442de24af8f661aaaeda947a52536
-  data.tar.gz: 87404b518150d21bd9f20605b44ee0008a54efd0
+  metadata.gz: 58e1606b49e2fe865d648c113d2316ba19a4604d
+  data.tar.gz: 7c50f49a82f7bd2dec47546ddb51459e35d44c0c
  SHA512:
-  metadata.gz: f7091e24dce522fe665b44ec723751298635cb4dface96907ac0dd0c840284ae0fcd96cd837eb0aa4193cf5a0a3630b419f81165aa45dd5b5d83abdf58921cba
-  data.tar.gz: 05d429649b6302583ac6017d49e614ad75c98b62191e145ba1e71deb97d8168513fb787b52ea4f6bfdcbbaab6fc0ed528704e94661c2c454244bd9d022ff1636
+  metadata.gz: c8d297ddd76d6f6c2167f934169a041c7b1df0b76a39a661ddc308405159407fa0800a3a41cf0bd8aeca2a1684ebd45482672f83985f442f500b6f4ce6e9edad
+  data.tar.gz: 8e5e40ab3a364110604e825a43f05a83ea1b8da136f50f8ee6ae475edb69d25c6f70e076c20175eabacf6dddb9032e4d111a1d251ed22d9e5f393ee3c06ef90f
data/lib/logstash-core/logstash-core.jar CHANGED
Binary file
data/lib/logstash-core/version.rb CHANGED
@@ -5,4 +5,4 @@
  # Note to authors: this should not include dashes because 'gem' barfs if
  # you include a dash in the version string.
 
- LOGSTASH_CORE_VERSION = "6.0.0-alpha1"
+ LOGSTASH_CORE_VERSION = "6.0.0-alpha2"
data/lib/logstash/agent.rb CHANGED
@@ -25,7 +25,7 @@ class LogStash::Agent
  include LogStash::Util::Loggable
  STARTED_AT = Time.now.freeze
 
- attr_reader :metric, :name, :pipelines, :settings, :webserver, :dispatcher
+ attr_reader :metric, :name, :settings, :webserver, :dispatcher
  attr_accessor :logger
 
  # initialize method for LogStash::Agent
@@ -38,7 +38,10 @@ class LogStash::Agent
  @settings = settings
  @auto_reload = setting("config.reload.automatic")
 
+ # Do not use @pipelines directly. Use #with_pipelines which does locking
  @pipelines = {}
+ @pipelines_lock = java.util.concurrent.locks.ReentrantLock.new
+
  @name = setting("node.name")
  @http_host = setting("http.host")
  @http_port = setting("http.port")
@@ -48,14 +51,13 @@ class LogStash::Agent
 
  # This is for backward compatibility in the tests
  if source_loader.nil?
- @source_loader = LogStash::Config::SOURCE_LOADER
+ @source_loader = LogStash::Config::SourceLoader.new
  @source_loader.add_source(LogStash::Config::Source::Local.new(@settings))
  else
  @source_loader = source_loader
  end
 
  @reload_interval = setting("config.reload.interval")
- @pipelines_mutex = Mutex.new
 
  @collect_metric = setting("metric.collect")
 
@@ -129,6 +131,17 @@ class LogStash::Agent
  !@running.value
  end
 
+ # Safely perform an operation on the pipelines hash
+ # Using the correct synchronization
+ def with_pipelines
+ begin
+ @pipelines_lock.lock
+ yield @pipelines
+ ensure
+ @pipelines_lock.unlock
+ end
+ end
+
  def converge_state_and_update
  results = @source_loader.fetch
 
@@ -145,13 +158,14 @@ class LogStash::Agent
  # content of it.
  converge_result = nil
 
- @pipelines_mutex.synchronize do
+ # we don't use the variable here, but we want the locking
+ with_pipelines do |pipelines|
  pipeline_actions = resolve_actions(results.response)
  converge_result = converge_state(pipeline_actions)
+ update_metrics(converge_result)
  end
 
  report_currently_running_pipelines(converge_result)
- update_metrics(converge_result)
  dispatch_events(converge_result)
 
  converge_result
@@ -220,52 +234,64 @@ class LogStash::Agent
  end
 
  def get_pipeline(pipeline_id)
- @pipelines_mutex.synchronize do
- @pipelines[pipeline_id]
+ with_pipelines do |pipelines|
+ pipelines[pipeline_id]
  end
  end
 
  def pipelines_count
- @pipelines_mutex.synchronize do
+ with_pipelines do |pipelines|
  pipelines.size
  end
  end
 
- def running_pipelines
- @pipelines_mutex.synchronize do
- @pipelines.select {|pipeline_id, _| running_pipeline?(pipeline_id) }
+ def with_running_pipelines
+ with_pipelines do |pipelines|
+ yield pipelines.select {|pipeline_id, _| running_pipeline?(pipeline_id) }
  end
  end
 
  def running_pipelines?
- @pipelines_mutex.synchronize do
- @pipelines.select {|pipeline_id, _| running_pipeline?(pipeline_id) }.any?
+ running_pipelines_count > 0
+ end
+
+ def running_pipelines_count
+ with_running_pipelines do |pipelines|
+ pipelines.size
  end
  end
 
  def running_user_defined_pipelines?
- running_user_defined_pipelines.any?
+ with_running_user_defined_pipelines do |pipelines|
+ pipelines.size > 0
+ end
  end
 
- def running_user_defined_pipelines
- @pipelines_mutex.synchronize do
- @pipelines.select do |_, pipeline|
+ def with_running_user_defined_pipelines
+ with_pipelines do |pipelines|
+ found = pipelines.select do |_, pipeline|
  pipeline.running? && !pipeline.system?
  end
+
+ yield found
  end
  end
 
  def close_pipeline(id)
- pipeline = @pipelines[id]
- if pipeline
- @logger.warn("closing pipeline", :id => id)
- pipeline.close
+ with_pipelines do |pipelines|
+ pipeline = pipelines[id]
+ if pipeline
+ @logger.warn("closing pipeline", :id => id)
+ pipeline.close
+ end
  end
  end
 
  def close_pipelines
- @pipelines.each do |id, _|
- close_pipeline(id)
+ with_pipelines do |pipelines|
+ pipelines.each do |id, _|
+ close_pipeline(id)
+ end
  end
  end
 
@@ -308,20 +334,22 @@ class LogStash::Agent
  #
  # This give us a bit more extensibility with the current startup/validation model
  # that we currently have.
- begin
- logger.debug("Executing action", :action => action)
- action_result = action.execute(self, @pipelines)
- converge_result.add(action, action_result)
-
- unless action_result.successful?
- logger.error("Failed to execute action", :id => action.pipeline_id,
- :action_type => action_result.class, :message => action_result.message)
+ with_pipelines do |pipelines|
+ begin
+ logger.debug("Executing action", :action => action)
+ action_result = action.execute(self, pipelines)
+ converge_result.add(action, action_result)
+
+ unless action_result.successful?
+ logger.error("Failed to execute action", :id => action.pipeline_id,
+ :action_type => action_result.class, :message => action_result.message)
+ end
+ rescue SystemExit => e
+ converge_result.add(action, e)
+ rescue Exception => e
+ logger.error("Failed to execute action", :action => action, :exception => e.class.name, :message => e.message)
+ converge_result.add(action, e)
  end
- rescue SystemExit => e
- converge_result.add(action, e)
- rescue Exception => e
- logger.error("Failed to execute action", :action => action, :exception => e.class.name, :message => e.message)
- converge_result.add(action, e)
  end
  end
 
@@ -335,13 +363,17 @@ class LogStash::Agent
  end
 
  def resolve_actions(pipeline_configs)
- @state_resolver.resolve(@pipelines, pipeline_configs)
+ with_pipelines do |pipelines|
+ @state_resolver.resolve(pipelines, pipeline_configs)
+ end
  end
 
  def report_currently_running_pipelines(converge_result)
  if converge_result.success? && converge_result.total > 0
- number_of_running_pipeline = running_pipelines.size
- logger.info("Pipelines running", :count => number_of_running_pipeline, :pipelines => running_pipelines.values.collect(&:pipeline_id) )
+ with_running_pipelines do |pipelines|
+ number_of_running_pipeline = pipelines.size
+ logger.info("Pipelines running", :count => number_of_running_pipeline, :pipelines => pipelines.values.collect(&:pipeline_id) )
+ end
  end
  end
 
@@ -394,9 +426,11 @@ class LogStash::Agent
  end
 
  def force_shutdown_pipelines!
- @pipelines.each do |_, pipeline|
- # TODO(ph): should it be his own action?
- pipeline.force_shutdown!
+ with_pipelines do |pipelines|
+ pipelines.each do |_, pipeline|
+ # TODO(ph): should it be his own action?
+ pipeline.force_shutdown!
+ end
  end
  end
 
@@ -406,19 +440,21 @@ class LogStash::Agent
  # In this context I could just call shutdown, but I've decided to
  # use the stop action implementation for that so we have the same code.
  # This also give us some context into why a shutdown is failing
- @pipelines_mutex.synchronize do
+ with_pipelines do |pipelines|
  pipeline_actions = resolve_actions([]) # We stop all the pipeline, so we converge to a empty state
  converge_state(pipeline_actions)
  end
  end
 
  def running_pipeline?(pipeline_id)
- thread = @pipelines[pipeline_id].thread
+ thread = get_pipeline(pipeline_id).thread
  thread.is_a?(Thread) && thread.alive?
  end
 
  def clean_state?
- @pipelines.empty?
+ with_pipelines do |pipelines|
+ pipelines.empty?
+ end
  end
 
  def setting(key)
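
The hunks above replace the agent's Mutex-guarded accessors with a single #with_pipelines helper built on a java.util.concurrent ReentrantLock. As a rough standalone JRuby sketch of that pattern (the class and variable names here are illustrative, not the agent's actual code):

    # Standalone JRuby sketch of the locking pattern introduced above.
    require 'java'

    class PipelineRegistry
      def initialize
        @pipelines = {}
        # A java.util.concurrent.locks.ReentrantLock replaces the old Ruby Mutex,
        # mirroring the @pipelines_lock the agent now creates in #initialize.
        @lock = java.util.concurrent.locks.ReentrantLock.new
      end

      # Yield the underlying hash only while the lock is held, so every caller
      # goes through the same synchronization (the role of #with_pipelines above).
      def with_pipelines
        @lock.lock
        yield @pipelines
      ensure
        @lock.unlock
      end
    end

    registry = PipelineRegistry.new
    registry.with_pipelines { |pipelines| pipelines[:main] = :running }
    registry.with_pipelines { |pipelines| puts pipelines.size } # => 1

Unlike a Ruby Mutex, the ReentrantLock tolerates re-entrant locking from the same thread, which matters because helpers such as #resolve_actions call #with_pipelines while #converge_state_and_update already holds the lock.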
data/lib/logstash/api/commands/hot_threads_reporter.rb CHANGED
@@ -1,4 +1,5 @@
  # encoding: utf-8
+ java_import 'org.logstash.instrument.reports.ThreadsReport'
 
  class HotThreadsReport
  STRING_SEPARATOR_LENGTH = 80.freeze
@@ -7,8 +8,7 @@ class HotThreadsReport
  def initialize(cmd, options)
  @cmd = cmd
  filter = { :stacktrace_size => options.fetch(:stacktrace_size, HOT_THREADS_STACK_TRACES_SIZE_DEFAULT) }
- jr_dump = JRMonitor.threads.generate(filter)
- @thread_dump = ::LogStash::Util::ThreadDump.new(options.merge(:dump => jr_dump))
+ @thread_dump = ::LogStash::Util::ThreadDump.new(options.merge(:dump => ThreadsReport.generate(filter)))
  end
 
  def to_s
@@ -42,7 +42,7 @@ class HotThreadsReport
  _hash["thread.stacktrace"].each do |trace|
  traces << trace
  end
- thread[:traces] = traces unless traces.empty?
+ thread[:traces] = traces
  hash[:threads] << thread
  end
  { :hot_threads => hash }
data/lib/logstash/api/commands/node.rb CHANGED
@@ -9,7 +9,7 @@ module LogStash
 
  def all(selected_fields=[])
  payload = {
- :pipeline => pipeline,
+ :pipelines => pipelines,
  :os => os,
  :jvm => jvm
  }
@@ -17,12 +17,20 @@ module LogStash
  payload
  end
 
- def pipeline(pipeline_id = LogStash::SETTINGS.get("pipeline.id").to_sym)
- stats = extract_metrics(
- [:stats, :pipelines, pipeline_id, :config],
+ def pipelines
+ pipeline_ids = service.get_shallow(:stats, :pipelines).keys
+ pipeline_ids.each_with_object({}) do |pipeline_id, result|
+ result[pipeline_id] = pipeline(pipeline_id)
+ end
+ end
+
+ def pipeline(pipeline_id)
+ extract_metrics(
+ [:stats, :pipelines, pipeline_id.to_sym, :config],
  :workers, :batch_size, :batch_delay, :config_reload_automatic, :config_reload_interval
  )
- stats.merge(:id => pipeline_id)
+ rescue
+ {}
  end
 
  def os
@@ -40,7 +48,6 @@ module LogStash
  {
  :pid => ManagementFactory.getRuntimeMXBean().getName().split("@").first.to_i,
  :version => java.lang.System.getProperty("java.version"),
- :vm_name => java.lang.System.getProperty("java.vm.name"),
  :vm_version => java.lang.System.getProperty("java.version"),
  :vm_vendor => java.lang.System.getProperty("java.vendor"),
  :vm_name => java.lang.System.getProperty("java.vm.name"),
data/lib/logstash/api/commands/stats.rb CHANGED
@@ -41,20 +41,26 @@ module LogStash
  def events
  extract_metrics(
  [:stats, :events],
- :in, :filtered, :out, :duration_in_millis
+ :in, :filtered, :out, :duration_in_millis, :queue_push_duration_in_millis
  )
  end
 
- def pipeline(pipeline_id = LogStash::SETTINGS.get("pipeline.id").to_sym)
- stats = service.get_shallow(:stats, :pipelines, pipeline_id)
- stats = PluginsStats.report(stats)
- stats.merge(:id => pipeline_id)
+ def pipeline(pipeline_id = nil)
+ if pipeline_id.nil?
+ pipeline_ids = service.get_shallow(:stats, :pipelines).keys
+ pipeline_ids.each_with_object({}) do |pipeline_id, result|
+ result[pipeline_id] = plugins_stats_report(pipeline_id)
+ end
+ else
+ { pipeline_id => plugins_stats_report(pipeline_id) }
+ end
+ rescue # failed to find pipeline
+ {}
  end
 
  def memory
  memory = service.get_shallow(:jvm, :memory)
  {
- :heap_used_in_bytes => memory[:heap][:used_in_bytes],
  :heap_used_percent => memory[:heap][:used_percent],
  :heap_committed_in_bytes => memory[:heap][:committed_in_bytes],
  :heap_max_in_bytes => memory[:heap][:max_in_bytes],
@@ -85,6 +91,12 @@ module LogStash
  HotThreadsReport.new(self, options)
  end
 
+ private
+ def plugins_stats_report(pipeline_id)
+ stats = service.get_shallow(:stats, :pipelines, pipeline_id.to_sym)
+ PluginsStats.report(stats)
+ end
+
  module PluginsStats
  module_function
 
data/lib/logstash/api/modules/node.rb CHANGED
@@ -20,6 +20,13 @@ module LogStash
  respond_with(node.hot_threads(options), {:as => as})
  end
 
+ get "/pipelines/:id" do
+ pipeline_id = params["id"]
+ payload = node.pipeline(pipeline_id)
+ halt(404) if payload.empty?
+ respond_with(:pipelines => { pipeline_id => payload } )
+ end
+
  get "/?:filter?" do
  selected_fields = extract_fields(params["filter"].to_s.strip)
  values = node.all(selected_fields)
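
The new route above adds a per-pipeline view to the node info API. A hedged usage sketch, assuming the module is mounted under the usual /_node prefix on the default 127.0.0.1:9600 API binding and a pipeline id of "main" (none of which are part of this diff):

    # Illustrative client call; host, port, prefix and pipeline id are assumptions.
    require 'net/http'
    require 'json'

    uri = URI("http://127.0.0.1:9600/_node/pipelines/main")
    response = Net::HTTP.get_response(uri)

    if response.is_a?(Net::HTTPSuccess)
      payload = JSON.parse(response.body)
      # Per the route above, the body nests the result as "pipelines" => { "<id>" => {...} }
      puts payload["pipelines"].keys
    else
      # The route halts with 404 when node.pipeline(id) returns an empty payload.
      puts "pipeline not found: #{response.code}"
    end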
data/lib/logstash/api/modules/node_stats.rb CHANGED
@@ -8,12 +8,19 @@ module LogStash
  @stats = factory.build(:stats)
  end
 
+ get "/pipelines/:id?" do
+ payload = pipeline_payload(params["id"])
+ halt(404) if payload.empty?
+ respond_with(:pipelines => payload)
+ end
+
  get "/?:filter?" do
  payload = {
  :jvm => jvm_payload,
  :process => process_payload,
- :pipeline => pipeline_payload,
- :reloads => reloads,
+ :events => events_payload,
+ :pipelines => pipeline_payload,
+ :reloads => reloads_payload,
  :os => os_payload
  }
  respond_with(payload, {:filter => params["filter"]})
@@ -32,7 +39,7 @@ module LogStash
  @stats.jvm
  end
 
- def reloads
+ def reloads_payload
  @stats.reloads
  end
 
@@ -44,8 +51,8 @@ module LogStash
  @stats.memory
  end
 
- def pipeline_payload
- @stats.pipeline
+ def pipeline_payload(val = nil)
+ @stats.pipeline(val)
  end
  end
  end
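
Taken together with the stats command changes above, the top-level node stats payload is reshaped: the singular :pipeline section becomes :pipelines keyed by pipeline id, the reloads helper is renamed, and a top-level :events section is added. A sketch of the resulting shape, keys only with values elided:

    # Illustrative shape only; key names follow the payload hash built above.
    example_node_stats = {
      :jvm       => {},   # jvm_payload
      :process   => {},   # process_payload
      :events    => {},   # new top-level events section
      :pipelines => {     # was :pipeline (singular) in alpha1
        :main    => {}    # one entry per pipeline id
      },
      :reloads   => {},   # reloads_payload
      :os        => {}    # os_payload
    }
    puts example_node_stats.keys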
data/lib/logstash/bootstrap_check/default_config.rb CHANGED
@@ -4,13 +4,9 @@ require "logstash/errors"
  module LogStash module BootstrapCheck
  class DefaultConfig
  def self.check(settings)
- if settings.get("config.string").nil? && settings.get("path.config").nil?
- raise LogStash::BootstrapCheckError, I18n.t("logstash.runner.missing-configuration")
- end
-
- if settings.get("config.reload.automatic") && settings.get("path.config").nil?
- # there's nothing to reload
- raise LogStash::BootstrapCheckError, I18n.t("logstash.runner.reload-without-config-path")
+ # currently none of the checks applies if there are multiple pipelines
+ if settings.get("config.reload.automatic") && settings.get_setting("config.string").set?
+ raise LogStash::BootstrapCheckError, I18n.t("logstash.runner.reload-with-config-string")
  end
  end
  end
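
The default-config bootstrap check no longer aborts when neither config.string nor path.config is set, and the reload check now rejects an inline config string instead of a missing config path (the new comment notes that the old checks don't apply once multiple pipelines are possible). A rough sketch of the condition the revised check enforces, using a hand-rolled settings double purely for illustration:

    # Illustrative only: FakeSettings stands in for the real LogStash settings object.
    FakeSetting = Struct.new(:value) do
      def set?
        !value.nil?
      end
    end

    class FakeSettings
      def initialize(reload_automatic:, config_string:)
        @reload_automatic = reload_automatic
        @config_string = FakeSetting.new(config_string)
      end

      def get(key)
        @reload_automatic if key == "config.reload.automatic"
      end

      def get_setting(key)
        @config_string if key == "config.string"
      end
    end

    settings = FakeSettings.new(reload_automatic: true, config_string: "input { stdin {} }")
    if settings.get("config.reload.automatic") && settings.get_setting("config.string").set?
      # This is the combination the new check rejects with
      # I18n.t("logstash.runner.reload-with-config-string").
      puts "would raise LogStash::BootstrapCheckError"
    end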