logstash-core 5.5.3-java → 5.6.0-java

Files changed (60)
  1. checksums.yaml +4 -4
  2. data/lib/logstash-core/logstash-core.jar +0 -0
  3. data/lib/logstash-core/version.rb +1 -1
  4. data/lib/logstash/api/commands/node.rb +2 -2
  5. data/lib/logstash/api/commands/stats.rb +2 -2
  6. data/lib/logstash/config/config_ast.rb +24 -1
  7. data/lib/logstash/config/modules_common.rb +47 -15
  8. data/lib/logstash/config/source/modules.rb +55 -0
  9. data/lib/logstash/config/string_escape.rb +27 -0
  10. data/lib/logstash/elasticsearch_client.rb +24 -2
  11. data/lib/logstash/environment.rb +2 -0
  12. data/lib/logstash/filter_delegator.rb +9 -6
  13. data/lib/logstash/instrument/collector.rb +7 -5
  14. data/lib/logstash/instrument/metric_store.rb +11 -11
  15. data/lib/logstash/instrument/namespaced_metric.rb +4 -0
  16. data/lib/logstash/instrument/namespaced_null_metric.rb +4 -0
  17. data/lib/logstash/instrument/null_metric.rb +10 -0
  18. data/lib/logstash/instrument/periodic_poller/dlq.rb +19 -0
  19. data/lib/logstash/instrument/periodic_pollers.rb +3 -1
  20. data/lib/logstash/instrument/wrapped_write_client.rb +33 -24
  21. data/lib/logstash/logging/logger.rb +26 -19
  22. data/lib/logstash/modules/{importer.rb → elasticsearch_importer.rb} +3 -3
  23. data/lib/logstash/modules/kibana_base.rb +24 -0
  24. data/lib/logstash/modules/kibana_client.rb +124 -0
  25. data/lib/logstash/modules/kibana_config.rb +29 -28
  26. data/lib/logstash/modules/kibana_dashboards.rb +36 -0
  27. data/lib/logstash/modules/kibana_importer.rb +17 -0
  28. data/lib/logstash/modules/kibana_settings.rb +40 -0
  29. data/lib/logstash/modules/logstash_config.rb +89 -17
  30. data/lib/logstash/modules/resource_base.rb +6 -5
  31. data/lib/logstash/modules/scaffold.rb +11 -3
  32. data/lib/logstash/modules/settings_merger.rb +23 -0
  33. data/lib/logstash/modules/util.rb +17 -0
  34. data/lib/logstash/output_delegator.rb +7 -5
  35. data/lib/logstash/pipeline.rb +34 -2
  36. data/lib/logstash/runner.rb +8 -13
  37. data/lib/logstash/settings.rb +20 -1
  38. data/lib/logstash/util/wrapped_acked_queue.rb +5 -24
  39. data/lib/logstash/util/wrapped_synchronous_queue.rb +14 -24
  40. data/lib/logstash/version.rb +1 -1
  41. data/locales/en.yml +11 -4
  42. data/spec/logstash/agent_spec.rb +19 -6
  43. data/spec/logstash/api/modules/node_spec.rb +2 -1
  44. data/spec/logstash/config/config_ast_spec.rb +47 -8
  45. data/spec/logstash/config/string_escape_spec.rb +24 -0
  46. data/spec/logstash/event_spec.rb +9 -0
  47. data/spec/logstash/filter_delegator_spec.rb +21 -7
  48. data/spec/logstash/instrument/periodic_poller/dlq_spec.rb +17 -0
  49. data/spec/logstash/instrument/periodic_poller/jvm_spec.rb +1 -1
  50. data/spec/logstash/legacy_ruby_event_spec.rb +4 -4
  51. data/spec/logstash/modules/logstash_config_spec.rb +56 -0
  52. data/spec/logstash/modules/scaffold_spec.rb +234 -0
  53. data/spec/logstash/output_delegator_spec.rb +15 -5
  54. data/spec/logstash/pipeline_spec.rb +76 -26
  55. data/spec/logstash/runner_spec.rb +46 -25
  56. data/spec/logstash/settings/splittable_string_array_spec.rb +51 -0
  57. data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +0 -22
  58. metadata +22 -4
  59. data/lib/logstash/modules/kibana_base_resource.rb +0 -10
  60. data/lib/logstash/program.rb +0 -14
data/lib/logstash/instrument/namespaced_metric.rb:
@@ -43,6 +43,10 @@ module LogStash module Instrument
   def collector
     @metric.collector
   end
+
+  def counter(key)
+    collector.get(@namespace_name, key, :counter)
+  end
 
   def namespace(name)
     NamespacedMetric.new(metric, namespace_name + Array(name))
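
Usage sketch of the new #counter (not from the diff; `metric` is assumed to be a LogStash::Instrument::Metric): the key is resolved against the collector once, so callers can cache the returned counter and increment it directly on hot paths.

    counter = metric.namespace([:stats, :events]).counter(:in)
    counter.increment      # +1
    counter.increment(10)  # +10, no namespace/key lookup per call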
data/lib/logstash/instrument/namespaced_null_metric.rb:
@@ -44,6 +44,10 @@ module LogStash module Instrument
     @metric.collector
   end
 
+  def counter(_)
+    ::LogStash::Instrument::NullMetric::NullGauge
+  end
+
   def namespace(name)
     NamespacedNullMetric.new(metric, namespace_name + Array(name))
   end
data/lib/logstash/instrument/null_metric.rb:
@@ -39,6 +39,10 @@ module LogStash module Instrument
     end
   end
 
+  def counter(_)
+    NullGauge
+  end
+
   def namespace(name)
     raise MetricNoNamespaceProvided if name.nil? || name.empty?
     NamespacedNullMetric.new(self, name)
@@ -49,6 +53,12 @@ module LogStash module Instrument
   end
 
   private
+
+  class NullGauge
+    def self.increment(_)
+    end
+  end
+
   # Null implementation of the internal timer class
   #
   # @see LogStash::Instrument::TimedExecution`
data/lib/logstash/instrument/periodic_poller/dlq.rb (new file):
@@ -0,0 +1,19 @@
+# encoding: utf-8
+require 'logstash/instrument/periodic_poller/base'
+
+module LogStash module Instrument module PeriodicPoller
+  class DeadLetterQueue < Base
+    def initialize(metric, agent, options = {})
+      super(metric, options)
+      @metric = metric
+      @agent = agent
+    end
+
+    def collect
+      _, pipeline = @agent.running_pipelines.first
+      unless pipeline.nil?
+        pipeline.collect_dlq_stats
+      end
+    end
+  end
+end end end
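
Usage sketch (illustrative, not from the diff): `metric` and `agent` are assumed collaborators, and the agent must expose #running_pipelines as a hash of {pipeline_id => pipeline}, per the #collect body above.

    require "logstash/instrument/periodic_poller/dlq"
    poller = LogStash::Instrument::PeriodicPoller::DeadLetterQueue.new(metric, agent)
    poller.collect  # asks the first running pipeline for DLQ stats; no-op if none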
data/lib/logstash/instrument/periodic_pollers.rb:
@@ -1,4 +1,5 @@
 # encoding: utf-8
+require "logstash/instrument/periodic_poller/dlq"
 require "logstash/instrument/periodic_poller/os"
 require "logstash/instrument/periodic_poller/jvm"
 require "logstash/instrument/periodic_poller/pq"
@@ -14,7 +15,8 @@ module LogStash module Instrument
     @metric = metric
     @periodic_pollers = [PeriodicPoller::Os.new(metric),
                          PeriodicPoller::JVM.new(metric),
-                         PeriodicPoller::PersistentQueue.new(metric, queue_type, pipelines)]
+                         PeriodicPoller::PersistentQueue.new(metric, queue_type, pipelines),
+                         PeriodicPoller::DeadLetterQueue.new(metric, pipelines)]
   end
 
   def start
data/lib/logstash/instrument/wrapped_write_client.rb:
@@ -10,7 +10,12 @@ module LogStash module Instrument
     @events_metrics = metric.namespace([:stats, :events])
     @pipeline_metrics = metric.namespace([:stats, :pipelines, pipeline_id, :events])
     @plugin_events_metrics = metric.namespace([:stats, :pipelines, pipeline_id, :plugins, plugin_type, plugin.id.to_sym, :events])
-
+    @events_metrics_counter = @events_metrics.counter(:in)
+    @events_metrics_time = @events_metrics.counter(:queue_push_duration_in_millis)
+    @pipeline_metrics_counter = @pipeline_metrics.counter(:in)
+    @pipeline_metrics_time = @pipeline_metrics.counter(:queue_push_duration_in_millis)
+    @plugin_events_metrics_counter = @plugin_events_metrics.counter(:out)
+    @plugin_events_metrics_time = @plugin_events_metrics.counter(:queue_push_duration_in_millis)
     define_initial_metrics_values
   end
 
@@ -19,41 +24,45 @@ module LogStash module Instrument
   end
 
   def push(event)
-    record_metric { @write_client.push(event) }
+    increment_counters(1)
+    start_time = java.lang.System.current_time_millis
+    result = @write_client.push(event)
+    report_execution_time(start_time)
+    result
   end
+
   alias_method(:<<, :push)
 
   def push_batch(batch)
-    record_metric(batch.size) { @write_client.push_batch(batch) }
+    increment_counters(batch.size)
+    start_time = java.lang.System.current_time_millis
+    result = @write_client.push_batch(batch)
+    report_execution_time(start_time)
+    result
   end
 
   private
-  def record_metric(size = 1)
-    @events_metrics.increment(:in, size)
-    @pipeline_metrics.increment(:in, size)
-    @plugin_events_metrics.increment(:out, size)
-
-    clock = @events_metrics.time(:queue_push_duration_in_millis)
 
-    result = yield
-
-    # Reuse the same values for all the endpoints to make sure we don't have skew in times.
-    execution_time = clock.stop
-
-    @pipeline_metrics.report_time(:queue_push_duration_in_millis, execution_time)
-    @plugin_events_metrics.report_time(:queue_push_duration_in_millis, execution_time)
+  def increment_counters(size)
+    @events_metrics_counter.increment(size)
+    @pipeline_metrics_counter.increment(size)
+    @plugin_events_metrics_counter.increment(size)
+  end
 
-    result
+  def report_execution_time(start_time)
+    execution_time = java.lang.System.current_time_millis - start_time
+    @events_metrics_time.increment(execution_time)
+    @pipeline_metrics_time.increment(execution_time)
+    @plugin_events_metrics_time.increment(execution_time)
   end
 
   def define_initial_metrics_values
-    @events_metrics.increment(:in, 0)
-    @pipeline_metrics.increment(:in, 0)
-    @plugin_events_metrics.increment(:out, 0)
-
-    @events_metrics.report_time(:queue_push_duration_in_millis, 0)
-    @pipeline_metrics.report_time(:queue_push_duration_in_millis, 0)
-    @plugin_events_metrics.report_time(:queue_push_duration_in_millis, 0)
+    @events_metrics_counter.increment(0)
+    @pipeline_metrics_counter.increment(0)
+    @plugin_events_metrics_counter.increment(0)
+    @events_metrics_time.increment(0)
+    @pipeline_metrics_time.increment(0)
+    @plugin_events_metrics_time.increment(0)
   end
 end
 end end
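
For illustration, a minimal self-contained sketch (plain Ruby with hypothetical names, not the Logstash classes) of the pattern this refactor applies: resolve each counter once at construction, then increment on the hot path instead of re-resolving the metric key and allocating a block per event.

    require "concurrent"

    class SketchCounter
      def initialize
        @value = Concurrent::AtomicFixnum.new(0)
      end

      def increment(n = 1)
        @value.increment(n)
      end
    end

    class SketchWriteClient
      def initialize(queue)
        @queue = queue
        @in_counter  = SketchCounter.new  # cached once, like @events_metrics_counter
        @push_millis = SketchCounter.new  # like @events_metrics_time
      end

      def push(event)
        @in_counter.increment(1)
        start = (Time.now.to_f * 1000).to_i  # stands in for java.lang.System.current_time_millis
        result = @queue.push(event)
        @push_millis.increment((Time.now.to_f * 1000).to_i - start)
        result
      end
    end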
data/lib/logstash/logging/logger.rb:
@@ -8,10 +8,12 @@ module LogStash
   java_import org.apache.logging.log4j.core.config.Configurator
   java_import org.apache.logging.log4j.core.config.DefaultConfiguration
   java_import org.apache.logging.log4j.core.config.LoggerConfig
+  java_import org.logstash.log.LogstashLoggerContextFactory
+  java_import org.apache.logging.log4j.core.LoggerContext
+  java_import java.net.URI
 
   class Logger
     @@config_mutex = Mutex.new
-    @@logging_context = nil
 
     def initialize(name)
       @logger = LogManager.getLogger(name)
@@ -71,45 +73,50 @@ module LogStash
     raise ArgumentError, "invalid level[#{level}] for logger[#{path}]"
   end
 
-  def self.initialize(config_location)
+  def self.reconfigure(config_location)
     @@config_mutex.synchronize do
-      if @@logging_context.nil?
-        file_path = URI(config_location).path
-        if ::File.exists?(file_path)
-          logs_location = java.lang.System.getProperty("ls.logs")
-          puts "Sending Logstash's logs to #{logs_location} which is now configured via log4j2.properties"
-          @@logging_context = Configurator.initialize(nil, config_location)
-        else
-          # fall back to default config
-          puts "Could not find log4j2 configuration at path #{file_path}. Using default config which logs to console"
-          @@logging_context = Configurator.initialize(DefaultConfiguration.new)
-        end
+      config_location_uri = URI.create(config_location)
+      file_path = config_location_uri.path
+      if ::File.exists?(file_path)
+        logs_location = java.lang.System.getProperty("ls.logs")
+        puts "Sending Logstash's logs to #{logs_location} which is now configured via log4j2.properties"
+        # reconfigure the default context to use our log4j2.properties file
+        get_logging_context.setConfigLocation(URI.create(config_location))
+        # ensure everyone agrees which context to use for the LogManager
+        context_factory = LogstashLoggerContextFactory.new(get_logging_context)
+        LogManager.setFactory(context_factory)
+      else
+        # fall back to default config
+        puts "Could not find log4j2 configuration at path #{file_path}. Using default config which logs errors to the console"
       end
     end
   end
 
+  # until dev_utils/rspec/spec_helper is changed, we need to have both methods
+  singleton_class.send(:alias_method, :initialize, :reconfigure)
+
   def self.get_logging_context
-    return @@logging_context
+    return LoggerContext.getContext(false)
   end
 
-  # Clone of org.apache.logging.log4j.core.config.Configurator.setLevel(), but using initialized @@logging_context
+  # Clone of org.apache.logging.log4j.core.config.Configurator.setLevel(), but ensure the proper context is used
  def self.set_level(_level, path)
-    configuration = @@logging_context.getConfiguration()
+    configuration = get_logging_context.getConfiguration()
     level = Level.valueOf(_level)
     if path.nil? || path.strip.empty?
       root_logger = configuration.getRootLogger()
       if root_logger.getLevel() != level
         root_logger.setLevel(level)
-        @@logging_context.updateLoggers()
+        get_logging_context.updateLoggers()
       end
     else
       package_logger = configuration.getLoggerConfig(path)
       if package_logger.name != path # no package logger found
         configuration.addLogger(path, LoggerConfig.new(path, level, true))
-        @@logging_context.updateLoggers()
+        get_logging_context.updateLoggers()
       elsif package_logger.getLevel() != level
         package_logger.setLevel(level)
-        @@logging_context.updateLoggers()
+        get_logging_context.updateLoggers()
       end
     end
   end
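
Illustrative calls (the properties path below is hypothetical, not mandated by the diff): reconfigure the shared Log4j2 context from a log4j2.properties URI, then adjust one logger's level; a missing file falls back to the default console configuration.

    require "logstash/logging/logger"
    LogStash::Logging::Logger.reconfigure("file:///usr/share/logstash/config/log4j2.properties")
    LogStash::Logging::Logger.set_level("warn", "logstash.modules")  # pass nil/"" as path for the root logger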
data/lib/logstash/modules/{importer.rb → elasticsearch_importer.rb}:
@@ -2,7 +2,7 @@
 require "logstash/namespace"
 require "logstash/logging"
 
-module LogStash module Modules class Importer
+module LogStash module Modules class ElasticsearchImporter
   include LogStash::Util::Loggable
 
   def initialize(client)
@@ -11,7 +11,7 @@ module LogStash module Modules class Importer
 
   def put(resource, overwrite = true)
     path = resource.import_path
-    logger.info("Attempting PUT", :url_path => path, :file_path => resource.content_path)
+    logger.debug("Attempting PUT", :url_path => path, :file_path => resource.content_path)
     if !overwrite && content_exists?(path)
       logger.debug("Found existing Elasticsearch resource.", :resource => path)
       return
@@ -31,7 +31,7 @@ module LogStash module Modules class Importer
 
   def content_exists?(path)
     response = @client.head(path)
-    response.status >= 200 && response.status <= 299
+    response.status >= 200 && response.status < 300
   end
 
 end end end # class LogStash::Modules::Importer
data/lib/logstash/modules/kibana_base.rb (new file):
@@ -0,0 +1,24 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/json"
+
+module LogStash module Modules class KibanaBase
+  attr_reader :import_path, :content
+
+  def initialize(import_path, content)
+    @import_path, @content = import_path, content
+  end
+
+  def import(client)
+    raise NotImplementedError, "#{self.class.name} needs to implement `#import`"
+  end
+
+  def to_s
+    import_path
+  end
+
+  def content_as_object
+    return content unless content.is_a?(String)
+    LogStash::Json.load(content) rescue nil
+  end
+end end end
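
For illustration, a hypothetical subclass satisfying the NotImplementedError contract above; `client` is assumed to respond to #post, as the KibanaClient added below does.

    require "logstash/modules/kibana_base"

    class ExampleKibanaResource < LogStash::Modules::KibanaBase
      def import(client)
        client.post(import_path, content)  # content may be a String or an Object
      end
    end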
data/lib/logstash/modules/kibana_client.rb (new file):
@@ -0,0 +1,124 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/logging"
+require "logstash/json"
+require "manticore/client"
+
+module LogStash module Modules class KibanaClient
+  include LogStash::Util::Loggable
+
+  class Response
+    # to create a custom response with body as an Object (Hash or Array)
+    attr_reader :status, :body, :headers
+    def initialize(status, body, headers = {})
+      @status, @body, @headers = status, body, headers
+      @body = body.is_a?(String) ? LogStash::Json.load(body) : body
+    end
+
+    def succeeded?
+      @status >= 200 && @status < 300
+    end
+
+    def failed?
+      !succeeded?
+    end
+  end
+
+  attr_reader :version
+
+  def initialize(settings)
+    @settings = settings
+
+    client_options = {
+      request_timeout: 5,
+      connect_timeout: 5,
+      socket_timeout: 5,
+      pool_max: 10,
+      pool_max_per_route: 2
+    }
+
+    ssl_options = {}
+
+    if @settings["var.kibana.ssl.enabled"] == "true"
+      ssl_options[:verify] = @settings.fetch("var.kibana.ssl.verification_mode", "strict").to_sym
+      ssl_options[:ca_file] = @settings.fetch("var.kibana.ssl.certificate_authority", nil)
+      ssl_options[:client_cert] = @settings.fetch("var.kibana.ssl.certificate", nil)
+      ssl_options[:client_key] = @settings.fetch("var.kibana.ssl.key", nil)
+    end
+
+    client_options[:ssl] = ssl_options
+
+    @client = Manticore::Client.new(client_options)
+    @host = @settings.fetch("var.kibana.host", "localhost:5601")
+    username = @settings["var.kibana.username"]
+    password = @settings["var.kibana.password"]
+
+    @scheme = @settings.fetch("var.kibana.scheme", "http")
+    @http_options = {:headers => {'Content-Type' => 'application/json'}}
+    if username
+      @http_options[:headers]['Authorization'] = 'Basic ' + Base64.encode64("#{username}:#{password}").chomp
+    end
+
+    # e.g. {"name":"Elastics-MacBook-Pro.local","version":{"number":"6.0.0-alpha3","build_hash":"41e69","build_number":15613,"build_snapshot":true}..}
+    @version = "0.0.0"
+    response = get("api/status")
+    if response.succeeded?
+      status = response.body
+      if status["version"].is_a?(Hash)
+        @version = status["version"]["number"]
+        if status["version"]["build_snapshot"]
+          @version.concat("-SNAPSHOT")
+        end
+      else
+        @version = status["version"]
+      end
+    end
+    @http_options[:headers]['kbn-version'] = @version
+  end
+
+  def version_parts
+    @version.split(/\.|\-/)
+  end
+
+  def host_settings
+    "[\"#{@host}\"]"
+  end
+
+  def get(relative_path)
+    # e.g. api/kibana/settings
+    safely(:get, relative_path, @http_options)
+  end
+
+  # content will be converted to a json string
+  def post(relative_path, content, headers = nil)
+
+    body = content.is_a?(String) ? content : LogStash::Json.dump(content)
+    options = {:body => body}.merge(headers || @http_options)
+    safely(:post, relative_path, options)
+  end
+
+  def head(relative_path)
+    safely(:head, relative_path, @http_options)
+  end
+
+  def can_connect?
+    head("api/status").succeeded?
+  end
+
+  private
+
+  def safely(method_sym, relative_path, options = {})
+    begin
+      resp = @client.http(method_sym, full_url(relative_path), options).call
+      Response.new(resp.code, resp.body, resp.headers)
+    rescue Manticore::ManticoreException => e
+      logger.error("Error when executing Kibana client request", :error => e)
+      body = {"statusCode" => 0, "error" => e.message}
+      Response.new(0, body, {})
+    end
+  end
+
+  def full_url(relative)
+    "#{@scheme}://#{@host}/#{relative}"
+  end
+end end end
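
Usage sketch, assuming a reachable Kibana (the constructor probes api/status to learn the version); the host and credentials below are illustrative, the "var.kibana.*" keys are the ones read above.

    require "logstash/modules/kibana_client"

    settings = {
      "var.kibana.host"     => "localhost:5601",
      "var.kibana.username" => "elastic",
      "var.kibana.password" => "changeme"
    }
    client = LogStash::Modules::KibanaClient.new(settings)
    puts client.version if client.can_connect?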
data/lib/logstash/modules/kibana_config.rb:
@@ -3,22 +3,26 @@ require "logstash/namespace"
 require "logstash/logging"
 
 require_relative "file_reader"
+require_relative "kibana_settings"
+require_relative "kibana_dashboards"
 require_relative "kibana_resource"
-require_relative "kibana_base_resource"
 
 module LogStash module Modules class KibanaConfig
   include LogStash::Util::Loggable
 
   ALLOWED_DIRECTORIES = ["search", "visualization"]
-
-  attr_reader :index_name
+  attr_reader :index_name # not used when importing via kibana but for BWC with ElastsearchConfig
 
   # We name it `modul` here because `module` has meaning in Ruby.
   def initialize(modul, settings)
     @directory = ::File.join(modul.directory, "kibana")
     @name = modul.module_name
     @settings = settings
-    @index_name = settings.fetch("dashboards.kibana_index", ".kibana")
+    @index_name = "kibana"
+    @pattern_name = "#{@name}-*"
+    @kibana_settings = [
+      KibanaSettings::Setting.new("defaultIndex", @pattern_name)
+    ]
   end
 
   def dashboards
@@ -30,21 +34,20 @@ module LogStash module Modules class KibanaConfig
   end
 
   def index_pattern
-    pattern_name = "#{@name}-*"
-    default_index_json = '{"defaultIndex": "#{pattern_name}"}'
-    default_index_content_id = @settings.fetch("index_pattern.kibana_version", "5.5.1")
-    [
-      KibanaResource.new(@index_name, "index-pattern", dynamic("index-pattern"), nil, pattern_name),
-      KibanaResource.new(@index_name, "config", nil, default_index_json, default_index_content_id)
-    ]
+    [KibanaResource.new(@index_name, "index-pattern", dynamic("index-pattern"), nil, @pattern_name)]
   end
 
   def resources
     list = index_pattern
     dashboards.each do |board|
+      list << board
       extract_panels_into(board, list)
     end
-    list.concat(extract_saved_searches(list))
+    list.concat(extract_saved_searches_into(list))
+    [
+      KibanaSettings.new("api/kibana/settings", @kibana_settings),
+      KibanaDashboards.new("api/kibana/dashboards/import", list)
+    ]
   end
 
   private
@@ -54,9 +57,7 @@ module LogStash module Modules class KibanaConfig
   end
 
   def extract_panels_into(dashboard, list)
-    list << dashboard
-
-    dash = FileReader.read_json(dashboard.content_path)
+    dash = dashboard.content_as_object
 
     if !dash.is_a?(Hash)
       logger.warn("Kibana dashboard JSON is not an Object", :module => @name)
@@ -85,20 +86,20 @@ module LogStash module Modules class KibanaConfig
         logger.warn("panelJSON contained unknown type", :type => panel_type)
       end
     end
+  end
 
-  def extract_saved_searches(list)
-    result = [] # must not add to list while iterating
-    list.each do |resource|
-      next unless resource.contains?("savedSearchId")
-      content = resource.content_as_object
-      next if content.nil?
-      saved_search = content["savedSearchId"]
-      next if saved_search.nil?
-      ss_resource = KibanaResource.new(@index_name, "search", dynamic("search", saved_search))
-      next if list.member?(ss_resource) || result.member?(ss_resource)
-      result << ss_resource
-    end
-    result
+  def extract_saved_searches_into(list)
+    result = [] # must not add to list while iterating
+    list.each do |resource|
+      content = resource.content_as_object
+      next if content.nil?
+      next unless content.keys.include?("savedSearchId")
+      saved_search = content["savedSearchId"]
+      next if saved_search.nil?
+      ss_resource = KibanaResource.new(@index_name, "search", dynamic("search", saved_search))
+      next if list.member?(ss_resource) || result.member?(ss_resource)
+      result << ss_resource
+    end
+    result
   end
 end end end
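
Sketch of the new import flow implied by #resources (names come from the diff; the OpenStruct stand-in for a module scaffold and the on-disk kibana/ directory layout are assumptions):

    require "logstash/modules/kibana_config"
    require "ostruct"

    modul  = OpenStruct.new(directory: "/tmp/example_module", module_name: "example")  # hypothetical scaffold
    config = LogStash::Modules::KibanaConfig.new(modul, {})
    settings_request, dashboards_request = config.resources
    # settings_request   => KibanaSettings  for POST api/kibana/settings (sets defaultIndex to "example-*")
    # dashboards_request => KibanaDashboards for POST api/kibana/dashboards/import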