logstash-filter-kafka_time_machine 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+   metadata.gz: 4afd292b610638a04cf07a7c272c59819c561d43941f3784a3db4d67a91d4e40
+   data.tar.gz: 55b7b510230ff6549506b45a47fdd7457d7129a51188fd5820411bc76d1ccb5b
+ SHA512:
+   metadata.gz: 511df4078b94243f61877b2d2391692ab4623b1dbff23f064cda7a14323ef5a84482d96755a75fb7f592834c9762cb4d3c9071596e88ffe1d850fd5b18a666ae
+   data.tar.gz: e0c80cceba1e9f214cf481ab3d6aa65874007e5c80696401c610d1a519c79d6d7a97392f179b35461cef6b0b0b25c8ab59a030879af9280c29c10ddc57aa4bc1
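The checksums above cover the two archives packed inside the published .gem file (metadata.gz and data.tar.gz). A minimal Ruby sketch, not part of the package, for recomputing them locally; it assumes the two files have already been extracted from the downloaded .gem (a plain tar archive) into the current directory:

```ruby
# Hypothetical verification helper, not shipped with the gem.
# Assumes metadata.gz and data.tar.gz were extracted from the .gem archive
# into the current directory.
require "digest"

%w[metadata.gz data.tar.gz].each do |artifact|
  puts "#{artifact} SHA256: #{Digest::SHA256.file(artifact).hexdigest}"
  puts "#{artifact} SHA512: #{Digest::SHA512.file(artifact).hexdigest}"
end
```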
data/Gemfile ADDED
@@ -0,0 +1,10 @@
+ source 'https://rubygems.org'
+ gemspec
+
+ logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash"
+ use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1"
+
+ if Dir.exist?(logstash_path) && use_logstash_source
+   gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
+   gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
+ end
data/README.md ADDED
@@ -0,0 +1,3 @@
+ # logstash-filter-kafka_time_machine
+
+ TBD
data/lib/logstash/filters/kafkatimemachine.rb ADDED
@@ -0,0 +1,78 @@
+ # encoding: utf-8
+ require "logstash/filters/base"
+ require "logstash/namespace"
+ require "logstash/event"
+
+ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
+
+   config_name "kafkatimemachine"
+
+   public
+   def register
+
+   end
+
+   public
+   def filter(event)
+
+     # Extract shipper data and check for validity; note that kafka_datacenter_shipper is used for both shipper and indexer arrays
+     kafka_datacenter_shipper = event.get("[@metadata][kafka_datacenter_shipper]")
+     kafka_topic_shipper = event.get("[@metadata][kafka_topic_shipper]")
+     kafka_consumer_group_shipper = event.get("[@metadata][kafka_consumer_group_shipper]")
+     kafka_append_time_shipper = Float(event.get("[@metadata][kafka_append_time_shipper]")) rescue nil
+     logstash_kafka_read_time_shipper = Float(event.get("[@metadata][logstash_kafka_read_time_shipper]")) rescue nil
+
+     kafka_shipper_array = Array[kafka_datacenter_shipper, kafka_topic_shipper, kafka_consumer_group_shipper, kafka_append_time_shipper, logstash_kafka_read_time_shipper]
+     @logger.debug("kafka_shipper_array: #{kafka_shipper_array}")
+
+     if (kafka_shipper_array.any? { |text| text.nil? || text.to_s.empty? })
+       @logger.debug("kafka_shipper_array invalid: Found null")
+       error_string_shipper = "Error in shipper data: #{kafka_shipper_array}"
+       shipper_valid = false
+     else
+       @logger.debug("kafka_shipper_array valid")
+       shipper_valid = true
+       logstash_kafka_read_time_shipper = logstash_kafka_read_time_shipper.to_i
+       kafka_append_time_shipper = kafka_append_time_shipper.to_i
+       kafka_shipper_lag_ms = logstash_kafka_read_time_shipper - kafka_append_time_shipper
+     end
+
+     # Extract indexer data and check for validity
+     kafka_topic_indexer = event.get("[@metadata][kafka_topic_indexer]")
+     kafka_consumer_group_indexer = event.get("[@metadata][kafka_consumer_group_indexer]")
+     kafka_append_time_indexer = Float(event.get("[@metadata][kafka_append_time_indexer]")) rescue nil
+     logstash_kafka_read_time_indexer = Float(event.get("[@metadata][logstash_kafka_read_time_indexer]")) rescue nil
+
+     kafka_indexer_array = Array[kafka_datacenter_shipper, kafka_topic_indexer, kafka_consumer_group_indexer, kafka_append_time_indexer, logstash_kafka_read_time_indexer]
+     @logger.debug("kafka_indexer_array: #{kafka_indexer_array}")
+
+     if (kafka_indexer_array.any? { |text| text.nil? || text.to_s.empty? })
+       @logger.debug("kafka_indexer_array invalid: Found null")
+       error_string_indexer = "Error in indexer data: #{kafka_indexer_array}"
+       indexer_valid = false
+     else
+       @logger.debug("kafka_indexer_array valid")
+       indexer_valid = true
+       logstash_kafka_read_time_indexer = logstash_kafka_read_time_indexer.to_i
+       kafka_append_time_indexer = kafka_append_time_indexer.to_i
+       kafka_indexer_lag_ms = logstash_kafka_read_time_indexer - kafka_append_time_indexer
+     end
+
+     if (shipper_valid == true && indexer_valid == true)
+       kafka_total_lag_ms = logstash_kafka_read_time_indexer - kafka_append_time_shipper
+       event.set("[_ktm]", {"lag_total" => kafka_total_lag_ms, "lag_indexer" => kafka_indexer_lag_ms, "lag_shipper" => kafka_shipper_lag_ms, "datacenter_shipper" => kafka_datacenter_shipper, "kafka_topic_indexer" => kafka_topic_indexer, "kafka_consumer_group_indexer" => kafka_consumer_group_indexer, "kafka_topic_shipper" => kafka_topic_shipper, "kafka_consumer_group_shipper" => kafka_consumer_group_shipper, "tags" => ["ktm_lag_complete"] })
+     elsif (shipper_valid == true && indexer_valid == false)
+       event.set("[_ktm]", {"lag_shipper" => kafka_shipper_lag_ms, "datacenter_shipper" => kafka_datacenter_shipper, "kafka_topic_shipper" => kafka_topic_shipper, "kafka_consumer_group_shipper" => kafka_consumer_group_shipper, "tags" => ["ktm_lag_shipper"] })
+     elsif (indexer_valid == true && shipper_valid == false)
+       event.set("[_ktm]", {"lag_indexer" => kafka_indexer_lag_ms, "datacenter_shipper" => kafka_datacenter_shipper, "kafka_topic_indexer" => kafka_topic_indexer, "kafka_consumer_group_indexer" => kafka_consumer_group_indexer, "tags" => ["ktm_lag_indexer"] })
+     elsif (indexer_valid == false && shipper_valid == false)
+       @logger.error("Error kafkatimemachine: Could not build valid response --> #{error_string_shipper}, #{error_string_indexer}")
+       # event.set("[_ktm]", {"error_shipper" => error_string_shipper, "error_indexer" => error_string_indexer, "datacenter_shipper" => kafka_datacenter_shipper, "tags" => ["ktm_error"] })
+     end
+
+     # filter_matched should go in the last line of our successful code
+     filter_matched(event)
+
+   end # def filter
+
+ end # class LogStash::Filters::KafkaTimeMachine
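For context on what the filter above expects at runtime, here is a minimal sketch (not part of the package) that feeds a single event through it. It assumes a JRuby session with logstash-core on the load path; the [@metadata] field names are taken directly from the code above, while the datacenter, topic, consumer-group, and epoch-millisecond values are made up for illustration:

```ruby
# Minimal exercise of the filter outside a pipeline.
# Assumes logstash-core is available (e.g. via logstash-devutils).
require "logstash/event"
require "logstash/filters/kafkatimemachine"

filter = LogStash::Filters::KafkaTimeMachine.new({})
filter.register

event = LogStash::Event.new
# Shipper-side metadata (illustrative values; times are epoch milliseconds)
event.set("[@metadata][kafka_datacenter_shipper]", "dc1")
event.set("[@metadata][kafka_topic_shipper]", "logs-shipper")
event.set("[@metadata][kafka_consumer_group_shipper]", "logstash-shipper")
event.set("[@metadata][kafka_append_time_shipper]", "1623750000000")
event.set("[@metadata][logstash_kafka_read_time_shipper]", "1623750000500")
# Indexer-side metadata
event.set("[@metadata][kafka_topic_indexer]", "logs-indexer")
event.set("[@metadata][kafka_consumer_group_indexer]", "logstash-indexer")
event.set("[@metadata][kafka_append_time_indexer]", "1623750001000")
event.set("[@metadata][logstash_kafka_read_time_indexer]", "1623750002000")

filter.filter(event)
p event.get("[_ktm]")
# With both sides valid, this hash includes "lag_shipper" => 500,
# "lag_indexer" => 1000, "lag_total" => 2000 and the "ktm_lag_complete" tag.
```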
data/logstash-filter-kafka_time_machine.gemspec ADDED
@@ -0,0 +1,24 @@
+ Gem::Specification.new do |s|
+   s.name = 'logstash-filter-kafka_time_machine'
+   s.version = '0.2.0'
+   s.licenses = ['Apache-2.0']
+   s.summary = "Calculate total time of logstash event that traversed 2 Kafka queues from a shipper site to an indexer site"
+   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
+   s.authors = ["Chris Foster"]
+   s.email = 'chrifost@cisco.com'
+   s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
+   s.require_paths = ["lib"]
+
+   # Files
+   s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
+
+   # Tests
+   s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+   # Special flag to let us know this is actually a logstash plugin
+   s.metadata = { "logstash_plugin" => "true", "logstash_group" => "filter" }
+
+   # Gem dependencies
+   s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
+   s.add_development_dependency 'logstash-devutils', '~> 0'
+ end
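The gemspec description points at $LS_HOME/bin/logstash-plugin install for installation. As a hedged sketch, one way to produce the installable .gem from this spec with the RubyGems API (the equivalent of running `gem build` in the plugin directory):

```ruby
# Builds logstash-filter-kafka_time_machine-0.2.0.gem in the current directory,
# which can then be installed with $LS_HOME/bin/logstash-plugin install <gem file>.
require "rubygems/package"

spec = Gem::Specification.load("logstash-filter-kafka_time_machine.gemspec")
Gem::Package.build(spec)
```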
metadata ADDED
@@ -0,0 +1,85 @@
+ --- !ruby/object:Gem::Specification
+ name: logstash-filter-kafka_time_machine
+ version: !ruby/object:Gem::Version
+   version: 0.2.0
+ platform: ruby
+ authors:
+ - Chris Foster
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2021-06-15 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: logstash-core-plugin-api
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '1.60'
+     - - "<="
+       - !ruby/object:Gem::Version
+         version: '2.99'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '1.60'
+     - - "<="
+       - !ruby/object:Gem::Version
+         version: '2.99'
+ - !ruby/object:Gem::Dependency
+   name: logstash-devutils
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: '0'
+ description: This gem is a logstash plugin required to be installed on top of the
+   Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This
+   gem is not a stand-alone program
+ email: chrifost@cisco.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - Gemfile
+ - README.md
+ - lib/logstash/filters/kafkatimemachine.rb
+ - logstash-filter-kafka_time_machine.gemspec
+ homepage: http://www.elastic.co/guide/en/logstash/current/index.html
+ licenses:
+ - Apache-2.0
+ metadata:
+   logstash_plugin: 'true'
+   logstash_group: filter
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubygems_version: 3.0.3
+ signing_key:
+ specification_version: 4
+ summary: Calculate total time of logstash event that traversed 2 Kafka queues from
+   a shipper site to an indexer site
+ test_files: []