logstash-core 5.3.3-java → 5.4.0-java
- checksums.yaml +4 -4
- data/gemspec_jars.rb +2 -0
- data/lib/logstash-core/logstash-core.jar +0 -0
- data/lib/logstash-core/version.rb +1 -1
- data/lib/logstash-core_jars.rb +4 -0
- data/lib/logstash/agent.rb +15 -6
- data/lib/logstash/api/modules/base.rb +1 -1
- data/lib/logstash/api/rack_app.rb +1 -1
- data/lib/logstash/config/config_ast.rb +13 -13
- data/lib/logstash/config/mixin.rb +33 -28
- data/lib/logstash/environment.rb +11 -0
- data/lib/logstash/event.rb +56 -0
- data/lib/logstash/event_dispatcher.rb +2 -2
- data/lib/logstash/execution_context.rb +10 -0
- data/lib/logstash/filter_delegator.rb +3 -2
- data/lib/logstash/inputs/base.rb +15 -1
- data/lib/logstash/instrument/collector.rb +1 -1
- data/lib/logstash/instrument/metric.rb +4 -2
- data/lib/logstash/instrument/metric_store.rb +9 -5
- data/lib/logstash/instrument/null_metric.rb +1 -0
- data/lib/logstash/instrument/periodic_poller/cgroup.rb +3 -3
- data/lib/logstash/instrument/periodic_poller/jvm.rb +11 -8
- data/lib/logstash/instrument/periodic_poller/load_average.rb +4 -2
- data/lib/logstash/instrument/wrapped_write_client.rb +59 -0
- data/lib/logstash/java_integration.rb +2 -2
- data/lib/logstash/output_delegator.rb +2 -2
- data/lib/logstash/output_delegator_strategies/legacy.rb +5 -2
- data/lib/logstash/output_delegator_strategies/shared.rb +2 -1
- data/lib/logstash/output_delegator_strategies/single.rb +2 -1
- data/lib/logstash/outputs/base.rb +8 -0
- data/lib/logstash/patches/cabin.rb +1 -1
- data/lib/logstash/patches/stronger_openssl_defaults.rb +1 -1
- data/lib/logstash/pipeline.rb +47 -19
- data/lib/logstash/plugin.rb +3 -1
- data/lib/logstash/plugins/hooks_registry.rb +6 -6
- data/lib/logstash/plugins/registry.rb +2 -2
- data/lib/logstash/queue_factory.rb +7 -5
- data/lib/logstash/runner.rb +15 -1
- data/lib/logstash/settings.rb +14 -2
- data/lib/logstash/string_interpolation.rb +18 -0
- data/lib/logstash/timestamp.rb +27 -0
- data/lib/logstash/util.rb +1 -1
- data/lib/logstash/util/prctl.rb +1 -1
- data/lib/logstash/util/retryable.rb +1 -1
- data/lib/logstash/util/wrapped_acked_queue.rb +53 -22
- data/lib/logstash/util/wrapped_synchronous_queue.rb +51 -33
- data/lib/logstash/version.rb +1 -1
- data/locales/en.yml +4 -2
- data/logstash-core.gemspec +0 -3
- data/spec/api/lib/api/node_stats_spec.rb +2 -1
- data/spec/api/spec_helper.rb +1 -1
- data/spec/logstash/acked_queue_concurrent_stress_spec.rb +291 -0
- data/spec/logstash/agent_spec.rb +24 -0
- data/spec/logstash/config/mixin_spec.rb +11 -2
- data/spec/logstash/event_dispatcher_spec.rb +8 -1
- data/spec/logstash/event_spec.rb +346 -0
- data/spec/logstash/execution_context_spec.rb +13 -0
- data/spec/logstash/filter_delegator_spec.rb +4 -2
- data/spec/logstash/inputs/base_spec.rb +41 -0
- data/spec/logstash/instrument/metric_spec.rb +2 -1
- data/spec/logstash/instrument/metric_store_spec.rb +14 -0
- data/spec/logstash/instrument/namespaced_metric_spec.rb +2 -1
- data/spec/logstash/instrument/periodic_poller/cgroup_spec.rb +1 -1
- data/spec/logstash/instrument/periodic_poller/jvm_spec.rb +35 -0
- data/spec/logstash/instrument/periodic_poller/load_average_spec.rb +1 -5
- data/spec/logstash/instrument/wrapped_write_client_spec.rb +113 -0
- data/spec/logstash/json_spec.rb +1 -1
- data/spec/logstash/legacy_ruby_event_spec.rb +636 -0
- data/spec/logstash/legacy_ruby_timestamp_spec.rb +170 -0
- data/spec/logstash/output_delegator_spec.rb +6 -3
- data/spec/logstash/outputs/base_spec.rb +23 -0
- data/spec/logstash/pipeline_pq_file_spec.rb +18 -8
- data/spec/logstash/pipeline_spec.rb +41 -5
- data/spec/logstash/plugin_spec.rb +15 -3
- data/spec/logstash/plugins/hooks_registry_spec.rb +2 -2
- data/spec/logstash/runner_spec.rb +33 -2
- data/spec/logstash/settings/port_range_spec.rb +1 -1
- data/spec/logstash/settings_spec.rb +21 -0
- data/spec/logstash/timestamp_spec.rb +29 -0
- data/spec/logstash/util/accessors_spec.rb +179 -0
- data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +4 -11
- data/spec/logstash/util_spec.rb +1 -1
- data/spec/logstash/webserver_spec.rb +1 -1
- data/spec/support/mocks_classes.rb +65 -53
- metadata +25 -30
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b75f61585cb54f56f39ba3df892c501842fe618b
+  data.tar.gz: 1a2c44621cdc55841d2d6a9f03e869cc4b58af81
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 62549f54412120fafee19c383bd22885ea7e53fc091b65291aeda0d2b42b2ee885e36776385ff88a5f8996c326d8607b44f6a8430315e4940e4c92f1c446a7f8
+  data.tar.gz: be4c9ab4a85b0e86c33d74a0013d9cd49f44cb45aeba386b92d3489e330ce4bcb41568fdc0d8efa0dca19907d505a15d5c6e34acbba87592a32e75c2acc6a717
data/gemspec_jars.rb CHANGED
@@ -6,3 +6,5 @@ gem.requirements << "jar org.apache.logging.log4j:log4j-api, 2.6.2"
 gem.requirements << "jar org.apache.logging.log4j:log4j-core, 2.6.2"
 gem.requirements << "jar com.fasterxml.jackson.core:jackson-core, 2.7.4"
 gem.requirements << "jar com.fasterxml.jackson.core:jackson-databind, 2.7.4"
+gem.requirements << "jar com.fasterxml.jackson.module:jackson-module-afterburner, 2.7.4"
+gem.requirements << "jar com.fasterxml.jackson.dataformat:jackson-dataformat-cbor, 2.7.4"
data/lib/logstash-core/logstash-core.jar CHANGED
Binary file
data/lib/logstash-core_jars.rb CHANGED
@@ -3,16 +3,20 @@ begin
   require 'jar_dependencies'
 rescue LoadError
   require 'org/apache/logging/log4j/log4j-core/2.6.2/log4j-core-2.6.2.jar'
+  require 'com/fasterxml/jackson/module/jackson-module-afterburner/2.7.4/jackson-module-afterburner-2.7.4.jar'
   require 'org/apache/logging/log4j/log4j-api/2.6.2/log4j-api-2.6.2.jar'
   require 'com/fasterxml/jackson/core/jackson-core/2.7.4/jackson-core-2.7.4.jar'
   require 'com/fasterxml/jackson/core/jackson-annotations/2.7.0/jackson-annotations-2.7.0.jar'
+  require 'com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.7.4/jackson-dataformat-cbor-2.7.4.jar'
   require 'com/fasterxml/jackson/core/jackson-databind/2.7.4/jackson-databind-2.7.4.jar'
 end
 
 if defined? Jars
   require_jar( 'org.apache.logging.log4j', 'log4j-core', '2.6.2' )
+  require_jar( 'com.fasterxml.jackson.module', 'jackson-module-afterburner', '2.7.4' )
   require_jar( 'org.apache.logging.log4j', 'log4j-api', '2.6.2' )
   require_jar( 'com.fasterxml.jackson.core', 'jackson-core', '2.7.4' )
   require_jar( 'com.fasterxml.jackson.core', 'jackson-annotations', '2.7.0' )
+  require_jar( 'com.fasterxml.jackson.dataformat', 'jackson-dataformat-cbor', '2.7.4' )
   require_jar( 'com.fasterxml.jackson.core', 'jackson-databind', '2.7.4' )
 end
data/lib/logstash/agent.rb CHANGED
@@ -61,7 +61,7 @@ class LogStash::Agent
   end
 
   def execute
-    @thread = Thread.current # this var is
+    @thread = Thread.current # this var is implicitly used by Stud.stop?
     @logger.debug("starting agent")
 
     start_pipelines
@@ -75,8 +75,8 @@ class LogStash::Agent
       Stud.interval(@reload_interval) { reload_state! }
     else
       while !Stud.stop?
-        if clean_state? ||
-          sleep
+        if clean_state? || running_user_defined_pipelines?
+          sleep(0.5)
         else
           break
         end
@@ -189,6 +189,15 @@ class LogStash::Agent
     end
   end
 
+  def running_user_defined_pipelines?
+    @upgrade_mutex.synchronize do
+      @pipelines.select do |pipeline_id, _|
+        pipeline = @pipelines[pipeline_id]
+        pipeline.running? && !pipeline.system?
+      end.any?
+    end
+  end
+
   def close_pipeline(id)
     pipeline = @pipelines[id]
     if pipeline
@@ -298,7 +307,7 @@ class LogStash::Agent
     end
 
     # check if this pipeline is not reloadable. it should not happen as per the check below
-    # but keep it here as a safety net if a reloadable pipeline was
+    # but keep it here as a safety net if a reloadable pipeline was reloaded with a non reloadable pipeline
     if !old_pipeline.reloadable?
       @logger.error("pipeline is not reloadable", :pipeline => id)
       return
@@ -370,7 +379,7 @@ class LogStash::Agent
       return
     end
 
-    # pipeline started
+    # pipeline started successfully, update reload success metrics
    @instance_reload_metric.increment(:successes)
    @pipeline_reload_metric.namespace([pipeline_id.to_sym, :reloads]).tap do |n|
      n.increment(:successes)
@@ -424,7 +433,7 @@ class LogStash::Agent
    @pipelines.each do |id, pipeline|
      start_pipeline(id)
      pipeline.collect_stats
-      # no reloads yet,
+      # no reloads yet, initialize all the reload metrics
      init_pipeline_reload_metrics(id)
    end
  end
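The new `running_user_defined_pipelines?` helper is what keeps the agent's main loop above alive only while at least one non-system pipeline is still running. A minimal standalone sketch of that predicate (not the actual Agent class; the pipeline objects and the ".monitoring" id below are hypothetical stand-ins):

# Minimal sketch of the predicate used by the agent loop above.
# Pipeline is a stand-in struct; the real check calls pipeline.running?
# and the new pipeline.system? flag while holding @upgrade_mutex.
Pipeline = Struct.new(:running, :system) do
  def running?; running; end
  def system?;  system;  end
end

pipelines = {
  "main"        => Pipeline.new(true, false),  # user-defined pipeline
  ".monitoring" => Pipeline.new(true, true)    # hypothetical system pipeline
}

running_user_defined = pipelines.any? { |_id, p| p.running? && !p.system? }
puts running_user_defined  # => true; once "main" stops, the agent loop can exit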
data/lib/logstash/api/modules/base.rb CHANGED
@@ -31,7 +31,7 @@ module LogStash
     end
 
     not_found do
-      # We cannot raise here because it
+      # We cannot raise here because it won't be catched by the `error` handler.
       # So we manually create a new instance of NotFound and just pass it down.
       respond_with(NotFoundError.new)
     end
data/lib/logstash/api/rack_app.rb CHANGED
@@ -80,7 +80,7 @@ module LogStash
       # Custom logger object. Rack CommonLogger does not work with cabin
       use ApiLogger, logger
 
-      # In test env we want errors to
+      # In test env we want errors to propagate up the chain
       # so we get easy to understand test failures.
       # In production / dev we don't want a bad API endpoint
       # to crash the process
data/lib/logstash/config/config_ast.rb CHANGED
@@ -60,20 +60,20 @@ end
 
 module LogStash; module Config; module AST
 
-  def self.
-    @
+  def self.deferred_conditionals=(val)
+    @deferred_conditionals = val
   end
 
-  def self.
-    @
+  def self.deferred_conditionals
+    @deferred_conditionals
   end
 
-  def self.
-    @
+  def self.deferred_conditionals_index
+    @deferred_conditionals_index
   end
 
-  def self.
-    @
+  def self.deferred_conditionals_index=(val)
+    @deferred_conditionals_index = val
   end
 
   def self.plugin_instance_index
@@ -92,8 +92,8 @@ module LogStash; module Config; module AST
 
   class Config < Node
     def compile
-      LogStash::Config::AST.
-      LogStash::Config::AST.
+      LogStash::Config::AST.deferred_conditionals = []
+      LogStash::Config::AST.deferred_conditionals_index = 0
       LogStash::Config::AST.plugin_instance_index = 0
       code = []
 
@@ -136,7 +136,7 @@ module LogStash; module Config; module AST
 
       code += definitions.join("\n").split("\n", -1).collect { |l| " #{l}" }
 
-      code += LogStash::Config::AST.
+      code += LogStash::Config::AST.deferred_conditionals
 
       return code.join("\n")
     end
@@ -402,7 +402,7 @@ module LogStash; module Config; module AST
       type = recursive_select_parent(PluginSection).first.plugin_type.text_value
 
       if type == "filter"
-        i = LogStash::Config::AST.
+        i = LogStash::Config::AST.deferred_conditionals_index += 1
         source = <<-CODE
           @generated_objects[:cond_func_#{i}] = lambda do |input_events|
             result = []
@@ -415,7 +415,7 @@ module LogStash; module Config; module AST
             result
           end
         CODE
-        LogStash::Config::AST.
+        LogStash::Config::AST.deferred_conditionals << source
 
         <<-CODE
           events = @generated_objects[:cond_func_#{i}].call(events)
|
@@ -47,15 +47,32 @@ module LogStash::Config::Mixin
|
|
47
47
|
base.extend(LogStash::Config::Mixin::DSL)
|
48
48
|
end
|
49
49
|
|
50
|
+
# Recursive method to replace environment variable references in parameters
|
51
|
+
def deep_replace(value)
|
52
|
+
if (value.is_a?(Hash))
|
53
|
+
value.each do |valueHashKey, valueHashValue|
|
54
|
+
value[valueHashKey.to_s] = deep_replace(valueHashValue)
|
55
|
+
end
|
56
|
+
else
|
57
|
+
if (value.is_a?(Array))
|
58
|
+
value.each_index do | valueArrayIndex|
|
59
|
+
value[valueArrayIndex] = deep_replace(value[valueArrayIndex])
|
60
|
+
end
|
61
|
+
else
|
62
|
+
return replace_env_placeholders(value)
|
63
|
+
end
|
64
|
+
end
|
65
|
+
end
|
66
|
+
|
50
67
|
def config_init(params)
|
51
68
|
# Validation will modify the values inside params if necessary.
|
52
69
|
# For example: converting a string to a number, etc.
|
53
|
-
|
70
|
+
|
54
71
|
# Keep a copy of the original config params so that we can later
|
55
72
|
# differentiate between explicit configuration and implicit (default)
|
56
73
|
# configuration.
|
57
74
|
original_params = params.clone
|
58
|
-
|
75
|
+
|
59
76
|
# store the plugin type, turns LogStash::Inputs::Base into 'input'
|
60
77
|
@plugin_type = self.class.ancestors.find { |a| a.name =~ /::Base$/ }.config_name
|
61
78
|
|
@@ -88,7 +105,7 @@ module LogStash::Config::Mixin
|
|
88
105
|
next if params.include?(name.to_s)
|
89
106
|
if opts.include?(:default) and (name.is_a?(Symbol) or name.is_a?(String))
|
90
107
|
# default values should be cloned if possible
|
91
|
-
# cloning prevents
|
108
|
+
# cloning prevents
|
92
109
|
case opts[:default]
|
93
110
|
when FalseClass, TrueClass, NilClass, Numeric
|
94
111
|
params[name.to_s] = opts[:default]
|
@@ -105,19 +122,7 @@ module LogStash::Config::Mixin
|
|
105
122
|
|
106
123
|
# Resolve environment variables references
|
107
124
|
params.each do |name, value|
|
108
|
-
|
109
|
-
value.each do |valueHashKey, valueHashValue|
|
110
|
-
value[valueHashKey.to_s] = replace_env_placeholders(valueHashValue)
|
111
|
-
end
|
112
|
-
else
|
113
|
-
if (value.is_a?(Array))
|
114
|
-
value.each_index do |valueArrayIndex|
|
115
|
-
value[valueArrayIndex] = replace_env_placeholders(value[valueArrayIndex])
|
116
|
-
end
|
117
|
-
else
|
118
|
-
params[name.to_s] = replace_env_placeholders(value)
|
119
|
-
end
|
120
|
-
end
|
125
|
+
params[name.to_s] = deep_replace(value)
|
121
126
|
end
|
122
127
|
|
123
128
|
|
@@ -204,7 +209,7 @@ module LogStash::Config::Mixin
|
|
204
209
|
|
205
210
|
name = name.to_s if name.is_a?(Symbol)
|
206
211
|
@config[name] = opts # ok if this is empty
|
207
|
-
|
212
|
+
|
208
213
|
if name.is_a?(String)
|
209
214
|
define_method(name) { instance_variable_get("@#{name}") }
|
210
215
|
define_method("#{name}=") { |v| instance_variable_set("@#{name}", v) }
|
@@ -343,7 +348,7 @@ module LogStash::Config::Mixin
|
|
343
348
|
:setting => config_key, :plugin => @plugin_name,
|
344
349
|
:type => @plugin_type))
|
345
350
|
is_valid = false
|
346
|
-
end
|
351
|
+
end
|
347
352
|
end
|
348
353
|
|
349
354
|
return is_valid
|
@@ -351,26 +356,26 @@ module LogStash::Config::Mixin
|
|
351
356
|
|
352
357
|
def process_parameter_value(value, config_settings)
|
353
358
|
config_val = config_settings[:validate]
|
354
|
-
|
359
|
+
|
355
360
|
if config_settings[:list]
|
356
361
|
value = Array(value) # coerce scalars to lists
|
357
362
|
# Empty lists are converted to nils
|
358
363
|
return true, nil if value.empty?
|
359
|
-
|
364
|
+
|
360
365
|
validated_items = value.map {|v| validate_value(v, config_val)}
|
361
366
|
is_valid = validated_items.all? {|sr| sr[0] }
|
362
367
|
processed_value = validated_items.map {|sr| sr[1]}
|
363
368
|
else
|
364
369
|
is_valid, processed_value = validate_value(value, config_val)
|
365
370
|
end
|
366
|
-
|
371
|
+
|
367
372
|
return [is_valid, processed_value]
|
368
373
|
end
|
369
374
|
|
370
375
|
def validate_check_parameter_values(params)
|
371
|
-
# Filter out
|
376
|
+
# Filter out parameters that match regexp keys.
|
372
377
|
# These are defined in plugins like this:
|
373
|
-
# config /foo.*/ => ...
|
378
|
+
# config /foo.*/ => ...
|
374
379
|
all_params_valid = true
|
375
380
|
|
376
381
|
params.each do |key, value|
|
@@ -378,10 +383,10 @@ module LogStash::Config::Mixin
|
|
378
383
|
next unless (config_key.is_a?(Regexp) && key =~ config_key) \
|
379
384
|
|| (config_key.is_a?(String) && key == config_key)
|
380
385
|
|
381
|
-
config_settings = @config[config_key]
|
386
|
+
config_settings = @config[config_key]
|
382
387
|
|
383
388
|
is_valid, processed_value = process_parameter_value(value, config_settings)
|
384
|
-
|
389
|
+
|
385
390
|
if is_valid
|
386
391
|
# Accept coerced value if valid
|
387
392
|
# Used for converting values in the config to proper objects.
|
@@ -393,7 +398,7 @@ module LogStash::Config::Mixin
|
|
393
398
|
:value_type => config_settings[:validate],
|
394
399
|
:note => processed_value))
|
395
400
|
end
|
396
|
-
|
401
|
+
|
397
402
|
all_params_valid &&= is_valid
|
398
403
|
|
399
404
|
break # done with this param key
|
@@ -433,7 +438,7 @@ module LogStash::Config::Mixin
|
|
433
438
|
end
|
434
439
|
result = value.first
|
435
440
|
elsif validator.is_a?(Symbol)
|
436
|
-
# TODO(sissel): Factor this out into a
|
441
|
+
# TODO(sissel): Factor this out into a coercion method?
|
437
442
|
# TODO(sissel): Document this stuff.
|
438
443
|
value = hash_or_array(value)
|
439
444
|
|
@@ -539,7 +544,7 @@ module LogStash::Config::Mixin
|
|
539
544
|
if value.size > 1
|
540
545
|
return false, "Expected uri (one value), got #{value.size} values?"
|
541
546
|
end
|
542
|
-
|
547
|
+
|
543
548
|
result = value.first.is_a?(::LogStash::Util::SafeURI) ? value.first : ::LogStash::Util::SafeURI.new(value.first)
|
544
549
|
when :path
|
545
550
|
if value.size > 1 # Only 1 value wanted
|
data/lib/logstash/environment.rb CHANGED
@@ -25,6 +25,7 @@ module LogStash
     Setting::Numeric.new("config.reload.interval", 3), # in seconds
     Setting::Boolean.new("metric.collect", true),
     Setting::String.new("pipeline.id", "main"),
+    Setting::Boolean.new("pipeline.system", false),
     Setting::PositiveInteger.new("pipeline.workers", LogStash::Config::CpuCoreStrategy.maximum),
     Setting::PositiveInteger.new("pipeline.output.workers", 1),
     Setting::PositiveInteger.new("pipeline.batch.size", 125),
@@ -41,6 +42,7 @@ module LogStash
     Setting::PortRange.new("http.port", 9600..9700),
     Setting::String.new("http.environment", "production"),
     Setting::String.new("queue.type", "memory", true, ["persisted", "memory", "memory_acked"]),
+    Setting::Boolean.new("queue.drain", false),
     Setting::Bytes.new("queue.page_capacity", "250mb"),
     Setting::Bytes.new("queue.max_bytes", "1024mb"),
     Setting::Numeric.new("queue.max_events", 0), # 0 is unlimited
@@ -56,6 +58,15 @@ module LogStash
   # Compute the default queue path based on `path.data`
   default_queue_file_path = ::File.join(SETTINGS.get("path.data"), "queue")
   SETTINGS.register Setting::WritableDirectory.new("path.queue", default_queue_file_path)
+
+  SETTINGS.on_post_process do |settings|
+    # If the data path is overridden but the queue path isn't recompute the queue path
+    # We need to do this at this stage because of the weird execution order
+    # our monkey-patched Clamp follows
+    if settings.set?("path.data") && !settings.set?("path.queue")
+      settings.set_value("path.queue", ::File.join(settings.get("path.data"), "queue"))
+    end
+  end
 
   module Environment
     extend self
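The practical effect of the `on_post_process` hook added above is that overriding `path.data` also moves the default queue directory, unless `path.queue` is set explicitly. A standalone sketch of that derivation (plain Ruby with a hypothetical default path, not Logstash's Settings API):

# Standalone sketch of the path.queue derivation; the real logic runs in
# SETTINGS.on_post_process against LogStash's Settings objects.
def effective_queue_path(user_settings)
  return user_settings["path.queue"] if user_settings.key?("path.queue")
  data = user_settings.fetch("path.data", "/usr/share/logstash/data") # hypothetical default
  File.join(data, "queue")
end

puts effective_queue_path({})                              # => /usr/share/logstash/data/queue
puts effective_queue_path("path.data" => "/mnt/lsdata")    # => /mnt/lsdata/queue
puts effective_queue_path("path.queue" => "/fast-disk/q")  # => /fast-disk/q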
data/lib/logstash/event.rb ADDED
@@ -0,0 +1,56 @@
+# encoding: utf-8
+
+require "logstash/namespace"
+require "logstash/json"
+require "jruby_event_ext"
+require "jruby_timestamp_ext"
+require "logstash/timestamp"
+require "logstash/string_interpolation"
+
+# transient pipeline events for normal in-flow signaling as opposed to
+# flow altering exceptions. for now having base classes is adequate and
+# in the future it might be necessary to refactor using like a BaseEvent
+# class to have a common interface for all pipeline events to support
+# eventual queueing persistence for example, TBD.
+module LogStash
+  class SignalEvent
+    def flush?; raise "abstract method"; end;
+    def shutdown?; raise "abstract method"; end;
+  end
+
+  class ShutdownEvent < SignalEvent
+    def flush?; false; end;
+    def shutdown?; true; end;
+  end
+
+  class FlushEvent < SignalEvent
+    def flush?; true; end;
+    def shutdown?; false; end;
+  end
+
+  class NoSignal < SignalEvent
+    def flush?; false; end;
+    def shutdown?; false; end;
+  end
+
+  FLUSH = FlushEvent.new
+  SHUTDOWN = ShutdownEvent.new
+  NO_SIGNAL = NoSignal.new
+
+  class Event
+    MSG_BRACKETS_METHOD_MISSING = "Direct event field references (i.e. event['field']) have been disabled in favor of using event get and set methods (e.g. event.get('field')). Please consult the Logstash 5.0 breaking changes documentation for more details.".freeze
+    MSG_BRACKETS_EQUALS_METHOD_MISSING = "Direct event field references (i.e. event['field'] = 'value') have been disabled in favor of using event get and set methods (e.g. event.set('field', 'value')). Please consult the Logstash 5.0 breaking changes documentation for more details.".freeze
+    RE_BRACKETS_METHOD = /^\[\]$/.freeze
+    RE_BRACKETS_EQUALS_METHOD = /^\[\]=$/.freeze
+
+    def method_missing(method_name, *arguments, &block)
+      if RE_BRACKETS_METHOD.match(method_name.to_s)
+        raise NoMethodError.new(MSG_BRACKETS_METHOD_MISSING)
+      end
+      if RE_BRACKETS_EQUALS_METHOD.match(method_name.to_s)
+        raise NoMethodError.new(MSG_BRACKETS_EQUALS_METHOD_MISSING)
+      end
+      super
+    end
+  end
+end
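The error messages in this new file point users at the `get`/`set` API that replaced direct bracket access on events. A short usage sketch, assuming a Logstash 5.x runtime where the Java-backed `LogStash::Event` is loadable:

# Usage sketch only; requires the Logstash 5.x runtime (jruby_event_ext).
require "logstash/event"

event = LogStash::Event.new("message" => "hello")

begin
  event["message"]             # old bracket accessor, now rejected above
rescue NoMethodError => e
  puts e.message               # points at the 5.0 breaking changes docs
end

event.set("[host][name]", "example.local")
puts event.get("message")       # => "hello"
puts event.get("[host][name]")  # => "example.local"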
data/lib/logstash/event_dispatcher.rb CHANGED
@@ -1,13 +1,13 @@
 # encoding: utf-8
 module LogStash
   class EventDispatcher
-    java_import "java.util.concurrent.
+    java_import "java.util.concurrent.CopyOnWriteArraySet"
 
     attr_reader :emitter
 
     def initialize(emitter)
       @emitter = emitter
-      @listeners =
+      @listeners = CopyOnWriteArraySet.new
     end
 
     # This operation is slow because we use a CopyOnWriteArrayList