logstash-core 5.5.3-java → 5.6.0-java

Sign up to get free protection for your applications and to get access to all the features.
Files changed (60)
  1. checksums.yaml +4 -4
  2. data/lib/logstash-core/logstash-core.jar +0 -0
  3. data/lib/logstash-core/version.rb +1 -1
  4. data/lib/logstash/api/commands/node.rb +2 -2
  5. data/lib/logstash/api/commands/stats.rb +2 -2
  6. data/lib/logstash/config/config_ast.rb +24 -1
  7. data/lib/logstash/config/modules_common.rb +47 -15
  8. data/lib/logstash/config/source/modules.rb +55 -0
  9. data/lib/logstash/config/string_escape.rb +27 -0
  10. data/lib/logstash/elasticsearch_client.rb +24 -2
  11. data/lib/logstash/environment.rb +2 -0
  12. data/lib/logstash/filter_delegator.rb +9 -6
  13. data/lib/logstash/instrument/collector.rb +7 -5
  14. data/lib/logstash/instrument/metric_store.rb +11 -11
  15. data/lib/logstash/instrument/namespaced_metric.rb +4 -0
  16. data/lib/logstash/instrument/namespaced_null_metric.rb +4 -0
  17. data/lib/logstash/instrument/null_metric.rb +10 -0
  18. data/lib/logstash/instrument/periodic_poller/dlq.rb +19 -0
  19. data/lib/logstash/instrument/periodic_pollers.rb +3 -1
  20. data/lib/logstash/instrument/wrapped_write_client.rb +33 -24
  21. data/lib/logstash/logging/logger.rb +26 -19
  22. data/lib/logstash/modules/{importer.rb → elasticsearch_importer.rb} +3 -3
  23. data/lib/logstash/modules/kibana_base.rb +24 -0
  24. data/lib/logstash/modules/kibana_client.rb +124 -0
  25. data/lib/logstash/modules/kibana_config.rb +29 -28
  26. data/lib/logstash/modules/kibana_dashboards.rb +36 -0
  27. data/lib/logstash/modules/kibana_importer.rb +17 -0
  28. data/lib/logstash/modules/kibana_settings.rb +40 -0
  29. data/lib/logstash/modules/logstash_config.rb +89 -17
  30. data/lib/logstash/modules/resource_base.rb +6 -5
  31. data/lib/logstash/modules/scaffold.rb +11 -3
  32. data/lib/logstash/modules/settings_merger.rb +23 -0
  33. data/lib/logstash/modules/util.rb +17 -0
  34. data/lib/logstash/output_delegator.rb +7 -5
  35. data/lib/logstash/pipeline.rb +34 -2
  36. data/lib/logstash/runner.rb +8 -13
  37. data/lib/logstash/settings.rb +20 -1
  38. data/lib/logstash/util/wrapped_acked_queue.rb +5 -24
  39. data/lib/logstash/util/wrapped_synchronous_queue.rb +14 -24
  40. data/lib/logstash/version.rb +1 -1
  41. data/locales/en.yml +11 -4
  42. data/spec/logstash/agent_spec.rb +19 -6
  43. data/spec/logstash/api/modules/node_spec.rb +2 -1
  44. data/spec/logstash/config/config_ast_spec.rb +47 -8
  45. data/spec/logstash/config/string_escape_spec.rb +24 -0
  46. data/spec/logstash/event_spec.rb +9 -0
  47. data/spec/logstash/filter_delegator_spec.rb +21 -7
  48. data/spec/logstash/instrument/periodic_poller/dlq_spec.rb +17 -0
  49. data/spec/logstash/instrument/periodic_poller/jvm_spec.rb +1 -1
  50. data/spec/logstash/legacy_ruby_event_spec.rb +4 -4
  51. data/spec/logstash/modules/logstash_config_spec.rb +56 -0
  52. data/spec/logstash/modules/scaffold_spec.rb +234 -0
  53. data/spec/logstash/output_delegator_spec.rb +15 -5
  54. data/spec/logstash/pipeline_spec.rb +76 -26
  55. data/spec/logstash/runner_spec.rb +46 -25
  56. data/spec/logstash/settings/splittable_string_array_spec.rb +51 -0
  57. data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +0 -22
  58. metadata +22 -4
  59. data/lib/logstash/modules/kibana_base_resource.rb +0 -10
  60. data/lib/logstash/program.rb +0 -14
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 35460f220b8242afa8c61bcd9f1ed826cd67d94b
4
- data.tar.gz: 06f3bf3f2408e650320fb5a01e7dd124112411ce
3
+ metadata.gz: c0f19e6dd83b6582139637f263865a2249965e84
4
+ data.tar.gz: d80ae0275d1526e640468e236b7b809fd90f9577
5
5
  SHA512:
6
- metadata.gz: a0e8fb932fac79ae5e867f337a0ad1e6c3e305128eb14ee9270b3580c7086b7c4dab3aa0466388a6b5d5c546266d18546d13c8c2ad9a59d34272e7a7f292d603
7
- data.tar.gz: 1c7022d6dcf395d708fd90cd71a0bb680af59bcbc894b1fde3b7dee5ccdd03fdefd55ca88760e55e6ff8983fdd10017c63752a4e456d7a6d208fa70e0f48e250
6
+ metadata.gz: cbdbb0a428571a5409bdadb6cb100f2b494d513a2a8a6b0b8d40a90bd3f046956a5feeee71f64eea7ea0f5a1f63584e599e9cc8325839baa541e7309de3c1def
7
+ data.tar.gz: d1c50059807f9a8fc6b87a65d713c659261356106a7097523561688e86fb9d592c069a35ae89fda59a66d0ce082f91030811296d8ac779e9557ead6c64821093
@@ -5,4 +5,4 @@
5
5
  # Note to authors: this should not include dashes because 'gem' barfs if
6
6
  # you include a dash in the version string.
7
7
 
8
- LOGSTASH_CORE_VERSION = "5.5.3"
8
+ LOGSTASH_CORE_VERSION = "5.6.0"
@@ -20,8 +20,8 @@ module LogStash
20
20
  def pipeline(pipeline_id = LogStash::SETTINGS.get("pipeline.id").to_sym)
21
21
  stats = extract_metrics(
22
22
  [:stats, :pipelines, pipeline_id, :config],
23
- :workers, :batch_size, :batch_delay, :config_reload_automatic, :config_reload_interval
24
- )
23
+ :workers, :batch_size, :batch_delay, :config_reload_automatic, :config_reload_interval, :dead_letter_queue_enabled, :dead_letter_queue_path
24
+ ).reject{|_, v|v.nil?}
25
25
  stats.merge(:id => pipeline_id)
26
26
  end
27
27
 
@@ -107,8 +107,8 @@ module LogStash
107
107
  },
108
108
  :reloads => stats[:reloads],
109
109
  :queue => stats[:queue]
110
- }
111
- end
110
+ }.merge(stats[:dlq] ? {:dead_letter_queue => stats[:dlq]} : {})
111
+ end
112
112
  end # module PluginsStats
113
113
  end
114
114
  end
@@ -1,5 +1,6 @@
1
1
  # encoding: utf-8
2
2
  require 'logstash/errors'
3
+ require "logstash/config/string_escape"
3
4
  require "treetop"
4
5
 
5
6
  class Treetop::Runtime::SyntaxNode
@@ -9,6 +10,18 @@ class Treetop::Runtime::SyntaxNode
9
10
  return elements.collect(&:compile).reject(&:empty?).join("")
10
11
  end
11
12
 
13
+ def get_meta(key)
14
+ @ast_metadata ||= {}
15
+ return @ast_metadata[key] if @ast_metadata[key]
16
+ return self.parent.get_meta(key) if self.parent
17
+ nil
18
+ end
19
+
20
+ def set_meta(key, value)
21
+ @ast_metadata ||= {}
22
+ @ast_metadata[key] = value
23
+ end
24
+
12
25
  # Traverse the syntax tree recursively.
13
26
  # The order should respect the order of the configuration file as it is read
14
27
  # and written by humans (and the order in which it is parsed).
@@ -59,6 +72,7 @@ end
59
72
 
60
73
 
61
74
  module LogStash; module Config; module AST
75
+ PROCESS_ESCAPE_SEQUENCES = :process_escape_sequences
62
76
 
63
77
  def self.deferred_conditionals=(val)
64
78
  @deferred_conditionals = val
@@ -91,6 +105,11 @@ module LogStash; module Config; module AST
91
105
  end
92
106
 
93
107
  class Config < Node
108
+ def process_escape_sequences=(val)
109
+ set_meta(PROCESS_ESCAPE_SEQUENCES, val)
110
+ end
111
+
112
+
94
113
  def compile
95
114
  LogStash::Config::AST.deferred_conditionals = []
96
115
  LogStash::Config::AST.deferred_conditionals_index = 0
@@ -333,7 +352,11 @@ module LogStash; module Config; module AST
333
352
  end
334
353
  class String < Value
335
354
  def compile
336
- return Unicode.wrap(text_value[1...-1])
355
+ if get_meta(PROCESS_ESCAPE_SEQUENCES)
356
+ Unicode.wrap(LogStash::Config::StringEscape.process_escapes(text_value[1...-1]))
357
+ else
358
+ Unicode.wrap(text_value[1...-1])
359
+ end
337
360
  end
338
361
  end
339
362
  class RegExp < Value
@@ -1,18 +1,33 @@
1
1
  # encoding: utf-8
2
2
  require "logstash/util/loggable"
3
3
  require "logstash/elasticsearch_client"
4
- require "logstash/modules/importer"
4
+ require "logstash/modules/kibana_client"
5
+ require "logstash/modules/elasticsearch_importer"
6
+ require "logstash/modules/kibana_importer"
7
+ require "logstash/modules/settings_merger"
5
8
  require "logstash/errors"
6
9
 
7
10
  module LogStash module Config
8
11
  class ModulesCommon # extracted here for bwc with 5.x
9
12
  include LogStash::Util::Loggable
10
13
 
14
+ MODULES_MAX_PIPELINES = 1
15
+
11
16
  def self.pipeline_configs(settings)
12
17
  pipelines = []
13
18
  plugin_modules = LogStash::PLUGIN_REGISTRY.plugins_with_type(:modules)
14
19
 
15
- modules_array = settings.get("modules.cli").empty? ? settings.get("modules") : settings.get("modules.cli")
20
+ cli_settings = settings.get("modules.cli")
21
+ yml_settings = settings.get("modules")
22
+
23
+ modules_array = if !(cli_settings.empty? && yml_settings.empty?)
24
+ LogStash::Modules::SettingsMerger.merge(cli_settings, yml_settings)
25
+ elsif cli_settings.empty?
26
+ yml_settings
27
+ else
28
+ cli_settings
29
+ end
30
+
16
31
  if modules_array.empty?
17
32
  # no specifed modules
18
33
  return pipelines
@@ -20,6 +35,11 @@ module LogStash module Config
20
35
  logger.debug("Specified modules", :modules_array => modules_array.to_s)
21
36
 
22
37
  module_names = modules_array.collect {|module_hash| module_hash["name"]}
38
+ if module_names.size > MODULES_MAX_PIPELINES
39
+ error_message = I18n.t("logstash.modules.configuration.modules-too-many-specified", :max => MODULES_MAX_PIPELINES, :specified_modules => module_names.join(', '))
40
+ raise LogStash::ConfigLoadingError, error_message
41
+ end
42
+
23
43
  if module_names.length > module_names.uniq.length
24
44
  duplicate_modules = module_names.group_by(&:to_s).select { |_,v| v.size > 1 }.keys
25
45
  raise LogStash::ConfigLoadingError, I18n.t("logstash.modules.configuration.modules-must-be-unique", :duplicate_modules => duplicate_modules)
@@ -36,29 +56,41 @@ module LogStash module Config
36
56
  specified_and_available_names.each do |module_name|
37
57
  connect_fail_args = {}
38
58
  begin
59
+ module_settings = settings.clone
60
+
39
61
  module_hash = modules_array.find {|m| m["name"] == module_name}
40
62
  current_module = plugin_modules.find { |allmodules| allmodules.module_name == module_name }
41
63
 
42
64
  alt_name = "module-#{module_name}"
43
65
  pipeline_id = alt_name
44
-
66
+ module_settings.set("pipeline.id", pipeline_id)
45
67
  current_module.with_settings(module_hash)
46
- esclient = LogStash::ElasticsearchClient.build(module_hash)
47
68
  config_test = settings.get("config.test_and_exit")
48
- if esclient.can_connect? || config_test
49
- if !config_test
50
- current_module.import(LogStash::Modules::Importer.new(esclient))
69
+ modul_setup = settings.get("modules_setup")
70
+ # Only import data if it's not a config test and --setup is true
71
+ if !config_test && modul_setup
72
+ esclient = LogStash::ElasticsearchClient.build(module_hash)
73
+ kbnclient = LogStash::Modules::KibanaClient.new(module_hash)
74
+ esconnected = esclient.can_connect?
75
+ kbnconnected = kbnclient.can_connect?
76
+ if esconnected && kbnconnected
77
+ current_module.add_kibana_version(kbnclient.version_parts)
78
+ current_module.import(
79
+ LogStash::Modules::ElasticsearchImporter.new(esclient),
80
+ LogStash::Modules::KibanaImporter.new(kbnclient)
81
+ )
82
+ else
83
+ connect_fail_args[:module_name] = module_name
84
+ connect_fail_args[:elasticsearch_hosts] = esclient.host_settings
85
+ connect_fail_args[:kibana_hosts] = kbnclient.host_settings
51
86
  end
52
-
53
- config_string = current_module.config_string
54
-
55
- pipelines << {"pipeline_id" => pipeline_id, "alt_name" => alt_name, "config_string" => config_string, "settings" => settings}
56
- else
57
- connect_fail_args[:module_name] = module_name
58
- connect_fail_args[:hosts] = esclient.host_settings
59
87
  end
88
+ config_string = current_module.config_string
89
+ pipelines << {"pipeline_id" => pipeline_id, "alt_name" => alt_name, "config_string" => config_string, "settings" => module_settings}
60
90
  rescue => e
61
- raise LogStash::ConfigLoadingError, I18n.t("logstash.modules.configuration.parse-failed", :error => e.message)
91
+ new_error = LogStash::ConfigLoadingError.new(I18n.t("logstash.modules.configuration.parse-failed", :error => e.message))
92
+ new_error.set_backtrace(e.backtrace)
93
+ raise new_error
62
94
  end
63
95
 
64
96
  if !connect_fail_args.empty?
@@ -0,0 +1,55 @@
1
+ # encoding: utf-8
2
+ require "logstash/config/source/base"
3
+ require "logstash/config/modules_common"
4
+ require "logstash/config/pipeline_config"
5
+ require "logstash/util/loggable"
6
+ require "logstash/errors"
7
+
8
+ module LogStash module Config module Source
9
+ class Modules < Base
10
+ include LogStash::Util::Loggable
11
+ def pipeline_configs
12
+ if config_conflict? # double check
13
+ raise ConfigurationError, @conflict_messages.join(", ")
14
+ end
15
+
16
+ pipelines = LogStash::Config::ModulesCommon.pipeline_configs(@settings)
17
+ pipelines.map do |hash|
18
+ PipelineConfig.new(self, hash["pipeline_id"].to_sym,
19
+ org.logstash.common.SourceWithMetadata.new("module", hash["alt_name"], 0, 0, hash["config_string"]),
20
+ hash["settings"])
21
+ end
22
+ end
23
+
24
+ def match?
25
+ # see basic settings predicates and getters defined in the base class
26
+ (modules_cli? || modules?) && !(config_string? || config_path?) && !automatic_reload_with_modules?
27
+ end
28
+
29
+ def config_conflict?
30
+ @conflict_messages.clear
31
+ # Make note that if modules are configured in both cli and logstash.yml that cli module
32
+ # settings will overwrite the logstash.yml modules settings
33
+ if modules_cli? && modules?
34
+ logger.info(I18n.t("logstash.runner.cli-module-override"))
35
+ end
36
+
37
+ if automatic_reload_with_modules?
38
+ @conflict_messages << I18n.t("logstash.runner.reload-with-modules")
39
+ end
40
+
41
+ # Check if config (-f or -e) and modules are configured
42
+ if (modules_cli? || modules?) && (config_string? || config_path?)
43
+ @conflict_messages << I18n.t("logstash.runner.config-module-exclusive")
44
+ end
45
+
46
+ @conflict_messages.any?
47
+ end
48
+
49
+ private
50
+
51
+ def automatic_reload_with_modules?
52
+ (modules_cli? || modules?) && config_reload_automatic?
53
+ end
54
+ end
55
+ end end end
@@ -0,0 +1,27 @@
1
+
2
+
3
+ module LogStash; module Config; module StringEscape
4
+ class << self
5
+ def process_escapes(input)
6
+ input.gsub(/\\./) do |value|
7
+ process(value)
8
+ end
9
+ end
10
+
11
+ private
12
+ def process(value)
13
+ case value[1]
14
+ when '"', "'", "\\"
15
+ value[1]
16
+ when "n"
17
+ "\n"
18
+ when "r"
19
+ "\r"
20
+ when "t"
21
+ "\t"
22
+ else
23
+ value
24
+ end
25
+ end
26
+ end
27
+ end end end
@@ -26,6 +26,24 @@ module LogStash class ElasticsearchClient
26
26
  @settings = settings
27
27
  @logger = logger
28
28
  @client_args = client_args
29
+
30
+ ssl_options = {}
31
+
32
+ if @settings["var.elasticsearch.ssl.enabled"] == "true"
33
+ ssl_options[:verify] = @settings.fetch("var.elasticsearch.ssl.verification_mode", true)
34
+ ssl_options[:ca_file] = @settings.fetch("var.elasticsearch.ssl.certificate_authority", nil)
35
+ ssl_options[:client_cert] = @settings.fetch("var.elasticsearch.ssl.certificate", nil)
36
+ ssl_options[:client_key] = @settings.fetch("var.elasticsearch.ssl.key", nil)
37
+ end
38
+
39
+ @client_args[:ssl] = ssl_options
40
+
41
+ username = @settings["var.elasticsearch.username"]
42
+ password = @settings["var.elasticsearch.password"]
43
+ if username
44
+ @client_args[:transport_options] = { :headers => { "Authorization" => 'Basic ' + Base64.encode64( "#{username}:#{password}" ).chomp } }
45
+ end
46
+
29
47
  @client = Elasticsearch::Client.new(@client_args)
30
48
  end
31
49
 
@@ -90,7 +108,11 @@ module LogStash class ElasticsearchClient
90
108
  end
91
109
 
92
110
  def unpack_hosts
93
- @settings.fetch("var.output.elasticsearch.hosts", "localhost:9200").split(',').map(&:strip)
111
+ setting = @settings.fetch("var.elasticsearch.hosts", "localhost:9200")
112
+ if setting.is_a?(String)
113
+ return setting.split(',').map(&:strip)
114
+ end
115
+ setting
94
116
  end
95
117
  end
96
118
 
@@ -117,4 +139,4 @@ module LogStash class ElasticsearchClient
117
139
  def host_settings
118
140
  @client.host_settings
119
141
  end
120
- end end # class LogStash::ModulesImporter
142
+ end end
@@ -22,8 +22,10 @@ module LogStash
22
22
  Setting::NullableString.new("config.string", nil, false),
23
23
  Setting.new("modules.cli", Array, []),
24
24
  Setting.new("modules", Array, []),
25
+ Setting::Boolean.new("modules_setup", false),
25
26
  Setting::Boolean.new("config.test_and_exit", false),
26
27
  Setting::Boolean.new("config.reload.automatic", false),
28
+ Setting::Boolean.new("config.support_escapes", false),
27
29
  Setting::Numeric.new("config.reload.interval", 3), # in seconds
28
30
  Setting::Boolean.new("metric.collect", true),
29
31
  Setting::String.new("pipeline.id", "main"),
@@ -26,6 +26,9 @@ module LogStash
26
26
  @filter.execution_context = execution_context
27
27
 
28
28
  @metric_events = namespaced_metric.namespace(:events)
29
+ @metric_events_in = @metric_events.counter(:in)
30
+ @metric_events_out = @metric_events.counter(:out)
31
+ @metric_events_time = @metric_events.counter(:duration_in_millis)
29
32
  namespaced_metric.gauge(:name, config_name)
30
33
 
31
34
  # Not all the filters will do bufferings
@@ -37,19 +40,19 @@ module LogStash
37
40
  end
38
41
 
39
42
  def multi_filter(events)
40
- @metric_events.increment(:in, events.size)
43
+ @metric_events_in.increment(events.size)
41
44
 
42
- clock = @metric_events.time(:duration_in_millis)
45
+ start_time = java.lang.System.current_time_millis
43
46
  new_events = @filter.multi_filter(events)
44
- clock.stop
47
+ @metric_events_time.increment(java.lang.System.current_time_millis - start_time)
45
48
 
46
49
  # There is no guarantee in the context of filter
47
50
  # that EVENTS_INT == EVENTS_OUT, see the aggregates and
48
51
  # the split filter
49
52
  c = new_events.count { |event| !event.cancelled? }
50
- @metric_events.increment(:out, c) if c > 0
51
53
 
52
- return new_events
54
+ @metric_events_out.increment(c) if c > 0
55
+ new_events
53
56
  end
54
57
 
55
58
  private
@@ -61,7 +64,7 @@ module LogStash
61
64
 
62
65
  # Filter plugins that does buffering or spooling of events like the
63
66
  # `Logstash-filter-aggregates` can return `NIL` and will flush on the next flush ticks.
64
- @metric_events.increment(:out, new_events.size) if new_events && new_events.size > 0
67
+ @metric_events_out.increment(new_events.size) if new_events && new_events.size > 0
65
68
  new_events
66
69
  end
67
70
  end
@@ -33,11 +33,7 @@ module LogStash module Instrument
33
33
  #
34
34
  def push(namespaces_path, key, type, *metric_type_params)
35
35
  begin
36
- metric = @metric_store.fetch_or_store(namespaces_path, key) do
37
- LogStash::Instrument::MetricType.create(type, namespaces_path, key)
38
- end
39
-
40
- metric.execute(*metric_type_params)
36
+ get(namespaces_path, key, type).execute(*metric_type_params)
41
37
  rescue MetricStore::NamespacesExpectedError => e
42
38
  logger.error("Collector: Cannot record metric", :exception => e)
43
39
  rescue NameError => e
@@ -51,6 +47,12 @@ module LogStash module Instrument
51
47
  end
52
48
  end
53
49
 
50
+ def get(namespaces_path, key, type)
51
+ @metric_store.fetch_or_store(namespaces_path, key) do
52
+ LogStash::Instrument::MetricType.create(type, namespaces_path, key)
53
+ end
54
+ end
55
+
54
56
  # Snapshot the current Metric Store and return it immediately,
55
57
  # This is useful if you want to get access to the current metric store without
56
58
  # waiting for a periodic call.
@@ -41,26 +41,26 @@ module LogStash module Instrument
41
41
  # @param [Symbol] The metric key
42
42
  # @return [Object] Return the new_value of the retrieve object in the tree
43
43
  def fetch_or_store(namespaces, key, default_value = nil)
44
- provided_value = block_given? ? yield(key) : default_value
45
44
 
46
45
  # We first check in the `@fast_lookup` store to see if we have already see that metrics before,
47
46
  # This give us a `o(1)` access, which is faster than searching through the structured
48
47
  # data store (Which is a `o(n)` operation where `n` is the number of element in the namespace and
49
- # the value of the key). If the metric is already present in the `@fast_lookup`, the call to
50
- # `#put_if_absent` will return the value. This value is send back directly to the caller.
48
+ # the value of the key). If the metric is already present in the `@fast_lookup`, then that value is sent
49
+ # back directly to the caller.
51
50
  #
52
- # BUT. If the value is not present in the `@fast_lookup` the value will be inserted and
53
- # `#puf_if_absent` will return nil. With this returned value of nil we assume that we don't
51
+ # BUT. If the value is not present in the `@fast_lookup` the value will be inserted and we assume that we don't
54
52
  # have it in the `@metric_store` for structured search so we add it there too.
55
- if found_value = @fast_lookup.put_if_absent(namespaces.dup << key, provided_value)
56
- return found_value
57
- else
53
+
54
+ value = @fast_lookup.get(namespaces.dup << key)
55
+ if value.nil?
56
+ value = block_given? ? yield(key) : default_value
57
+ @fast_lookup.put(namespaces.dup << key, value)
58
58
  @structured_lookup_mutex.synchronize do
59
- # If we cannot find the value this mean we need to save it in the store.
60
- fetch_or_store_namespaces(namespaces).fetch_or_store(key, provided_value)
59
+ # If we cannot find the value this mean we need to save it in the store.
60
+ fetch_or_store_namespaces(namespaces).fetch_or_store(key, value)
61
61
  end
62
- return provided_value
63
62
  end
63
+ return value;
64
64
  end
65
65
 
66
66
  # This method allow to retrieve values for a specific path,