logstash-codec-kafka_time_machine 0.1.0
- checksums.yaml +7 -0
- data/Gemfile +10 -0
- data/lib/logstash/codecs/kafkatimemachine.rb +109 -0
- data/logstash-codec-kafka_time_machine.gemspec +24 -0
- metadata +85 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: 6a00a02e273acd58d4a7e6a06573f02e100202d2
+  data.tar.gz: 6c0dfc8295cb4d4cc769c26beb3deca3220b5fb1
+SHA512:
+  metadata.gz: 3163b97371f86c32c2725e5813781c2e20c4bd458ba5090d0dad0cc8fccf116b1a7c499cbdee940cf20fcdae98d8aa1c2d3a31423af2b233d17df85bfc71595a
+  data.tar.gz: f61e0581276644f12d0d301145f9ac9be37a78e198329ade28bcfc92ddc1850b25c99b68bfa78aeaeeef80413df85e622a7431d2e1ec0bd7df2b916414a81257
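The checksums above pin the two published archives. A minimal verification sketch using Ruby's standard digest library (the data.tar.gz path is an assumption about where the unpacked gem archive sits locally):

# Verify the published SHA1 of data.tar.gz; path is hypothetical.
require "digest"

actual   = Digest::SHA1.file("data.tar.gz").hexdigest
expected = "6c0dfc8295cb4d4cc769c26beb3deca3220b5fb1"
puts(actual == expected ? "checksum OK" : "checksum MISMATCH: #{actual}")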
data/Gemfile
ADDED
@@ -0,0 +1,10 @@
+source 'https://rubygems.org'
+gemspec
+
+logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash"
+use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1"
+
+if Dir.exist?(logstash_path) && use_logstash_source
+  gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
+  gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
+end
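The Gemfile can swap the released logstash-core gems for a local Logstash source tree during development. A small sketch of how its two environment variables resolve (the checkout path is hypothetical):

# Hypothetical checkout; LOGSTASH_PATH falls back to "../../logstash".
ENV["LOGSTASH_PATH"]   = "/home/dev/src/logstash"
ENV["LOGSTASH_SOURCE"] = "1"

logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash"
use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1"

# Bundler takes the :path overrides only when both checks pass.
puts "using local logstash-core" if Dir.exist?(logstash_path) && use_logstash_source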
data/lib/logstash/codecs/kafkatimemachine.rb
ADDED
@@ -0,0 +1,109 @@
+# encoding: utf-8
+require "logstash/codecs/base"
+require "logstash/namespace"
+require "logstash/event"
+
+class LogStash::Codecs::KafkaTimeMachine < LogStash::Codecs::Base
+
+  config_name "kafkatimemachine"
+
+  @write_end_of_log = true
+
+  # Enable debug log file writes
+  config :enable_log, :validate => :boolean, :default => false
+
+  def file_output( output_line )
+
+    # Limit max file size to 5MB to protect integrity of host
+    max_file_size = 5242880
+
+    # Open file and append until max size reached
+    File.open("/tmp/kafkatimemachine.txt", "a") do |f|
+      if (f.size <= max_file_size)
+        f.puts(output_line)
+        @write_end_of_log = true
+      elsif (f.size > max_file_size && @write_end_of_log == true)
+        f.puts("Maximum file size of #{max_file_size} bytes reached; delete /tmp/kafkatimemachine.txt to resume writing")
+        @write_end_of_log = false
+      end
+    end
+
+  end
+
+  public
+  def register
+
+  end
+
+  public
+  def decode(data)
+    raise "Not implemented"
+  end # def decode
+
+  public
+  def encode(event)
+
+    # Extract producer data and check for validity
+    kafka_datacenter_producer = event.get("[@metadata][kafka_datacenter_producer]")
+    kafka_topic_producer = event.get("[@metadata][kafka_topic_producer]")
+    kafka_consumer_group_producer = event.get("[@metadata][kafka_consumer_group_producer]")
+    kafka_append_time_producer = Integer(event.get("[@metadata][kafka_append_time_producer]")) rescue nil
+    logstash_kafka_read_time_producer = Integer(event.get("[@metadata][logstash_kafka_read_time_producer]")) rescue nil
+
+    kafka_producer_array = Array[kafka_datacenter_producer, kafka_topic_producer, kafka_consumer_group_producer, kafka_append_time_producer, logstash_kafka_read_time_producer]
+    @logger.debug("kafka_producer_array: #{kafka_producer_array}")
+
+    if (kafka_producer_array.any? { |text| text.nil? || text.to_s.empty? })
+      @logger.debug("kafka_producer_array invalid: Found null")
+      error_string_producer = "Error in producer data: #{kafka_producer_array}"
+      producer_valid = false
+    else
+      @logger.debug("kafka_producer_array valid")
+      producer_valid = true
+      kafka_producer_lag_ms = logstash_kafka_read_time_producer - kafka_append_time_producer
+    end
+
+    # Extract aggregate data and check for validity
+    kafka_datacenter_aggregate = event.get("[@metadata][kafka_datacenter_aggregate]")
+    kafka_topic_aggregate = event.get("[@metadata][kafka_topic_aggregate]")
+    kafka_consumer_group_aggregate = event.get("[@metadata][kafka_consumer_group_aggregate]")
+    kafka_append_time_aggregate = Integer(event.get("[@metadata][kafka_append_time_aggregate]")) rescue nil
+    logstash_kafka_read_time_aggregate = Integer(event.get("[@metadata][logstash_kafka_read_time_aggregate]")) rescue nil
+
+    kafka_aggregate_array = Array[kafka_datacenter_aggregate, kafka_topic_aggregate, kafka_consumer_group_aggregate, kafka_append_time_aggregate, logstash_kafka_read_time_aggregate]
+    @logger.debug("kafka_aggregate_array: #{kafka_aggregate_array}")
+
+    if (kafka_aggregate_array.any? { |text| text.nil? || text.to_s.empty? })
+      @logger.debug("kafka_aggregate_array invalid: Found null")
+      error_string_aggregate = "Error in aggregate data: #{kafka_aggregate_array}"
+      aggregate_valid = false
+    else
+      @logger.debug("kafka_aggregate_array valid")
+      aggregate_valid = true
+      kafka_aggregate_lag_ms = logstash_kafka_read_time_aggregate - kafka_append_time_aggregate
+    end
+
+    # Get current time for influxdb timestamp
+    kafka_logstash_influx_metric_time = (Time.now.to_f * (1000*1000*1000)).to_i
+
+    if (producer_valid == true && aggregate_valid == true)
+      kafka_total_lag_ms = logstash_kafka_read_time_aggregate - kafka_append_time_producer
+      influx_line_protocol = "kafka_lag_time,meta_source=lma,meta_type=ktm,meta_datacenter=#{kafka_datacenter_producer},ktm_lag_type=complete,kafka_topic_aggregate=#{kafka_topic_aggregate},kafka_consumer_group_aggregate=#{kafka_consumer_group_aggregate},kafka_topic_producer=#{kafka_topic_producer},kafka_consumer_group_producer=#{kafka_consumer_group_producer} kafka_total_lag_ms=#{kafka_total_lag_ms},kafka_aggregate_lag_ms=#{kafka_aggregate_lag_ms},kafka_producer_lag_ms=#{kafka_producer_lag_ms} #{kafka_logstash_influx_metric_time}"
+    elsif (producer_valid == true && aggregate_valid == false)
+      influx_line_protocol = "kafka_lag_time,meta_source=lma,meta_type=ktm,meta_datacenter=#{kafka_datacenter_producer},ktm_lag_type=producer,kafka_topic_producer=#{kafka_topic_producer},kafka_consumer_group_producer=#{kafka_consumer_group_producer} kafka_producer_lag_ms=#{kafka_producer_lag_ms} #{kafka_logstash_influx_metric_time}"
+    elsif (aggregate_valid == true && producer_valid == false)
+      influx_line_protocol = "kafka_lag_time,meta_source=lma,meta_type=ktm,meta_datacenter=#{kafka_datacenter_aggregate},ktm_lag_type=aggregate,kafka_topic_aggregate=#{kafka_topic_aggregate},kafka_consumer_group_aggregate=#{kafka_consumer_group_aggregate} kafka_aggregate_lag_ms=#{kafka_aggregate_lag_ms} #{kafka_logstash_influx_metric_time}"
+    elsif (aggregate_valid == false && producer_valid == false)
+      @logger.error("Error kafkatimemachine: Could not build valid response --> #{error_string_producer}, #{error_string_aggregate}")
+      influx_line_protocol = nil
+    end
+
+    if (!influx_line_protocol.nil? && @enable_log == true)
+      file_output(influx_line_protocol)
+    end
+
+    @on_event.call(event, event.sprintf(influx_line_protocol))
+
+  end # def encode
+
+end # class LogStash::Codecs::KafkaTimeMachine
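The codec's lag arithmetic reduces to three subtractions over epoch-millisecond timestamps read from [@metadata]: each site's lag is its Logstash read time minus the Kafka append time, and total lag spans from the producer-site append to the aggregate-site read. A worked sketch with made-up timestamps, plus the shape of the record the "complete" branch would produce (all tag values hypothetical):

# Made-up epoch-millisecond timestamps for illustration only.
kafka_append_time_producer         = 1589200000000  # appended to producer-site Kafka
logstash_kafka_read_time_producer  = 1589200000250  # read by producer-site Logstash
kafka_append_time_aggregate        = 1589200000400  # appended to aggregate-site Kafka
logstash_kafka_read_time_aggregate = 1589200001000  # read by aggregate-site Logstash

kafka_producer_lag_ms  = logstash_kafka_read_time_producer  - kafka_append_time_producer   # => 250
kafka_aggregate_lag_ms = logstash_kafka_read_time_aggregate - kafka_append_time_aggregate  # => 600
kafka_total_lag_ms     = logstash_kafka_read_time_aggregate - kafka_append_time_producer   # => 1000

# With hypothetical tags (datacenter dc1, topics logs/logs_agg, consumer groups
# cg_prod/cg_agg), the "complete" branch emits one InfluxDB line-protocol record;
# the trailing timestamp comes from Time.now in nanoseconds, not from the event:
#
# kafka_lag_time,meta_source=lma,meta_type=ktm,meta_datacenter=dc1,ktm_lag_type=complete,kafka_topic_aggregate=logs_agg,kafka_consumer_group_aggregate=cg_agg,kafka_topic_producer=logs,kafka_consumer_group_producer=cg_prod kafka_total_lag_ms=1000,kafka_aggregate_lag_ms=600,kafka_producer_lag_ms=250 1589200001000000000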
data/logstash-codec-kafka_time_machine.gemspec
ADDED
@@ -0,0 +1,24 @@
+Gem::Specification.new do |s|
+  s.name = 'logstash-codec-kafka_time_machine'
+  s.version = '0.1.0'
+  s.licenses = ['Apache-2.0']
+  s.summary = "Calculate total time of logstash event that traversed 2 Kafka queues from a producer site to an aggregate site"
+  s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
+  s.authors = ["Chris Foster"]
+  s.email = 'chrifost@cisco.com'
+  s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
+  s.require_paths = ["lib"]
+
+  # Files
+  s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
+
+  # Tests
+  s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+  # Special flag to let us know this is actually a logstash plugin
+  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "codec" }
+
+  # Gem dependencies
+  s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
+  s.add_development_dependency 'logstash-devutils', '~> 0'
+end
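Logstash's plugin manager recognizes the gem through the metadata flags set in the gemspec above. A minimal sketch that loads the spec and reads them back (assumes it is run from the gem root):

# Load the gemspec and print the flags Logstash looks for.
spec = Gem::Specification.load("logstash-codec-kafka_time_machine.gemspec")
puts spec.metadata["logstash_plugin"]  # => "true"
puts spec.metadata["logstash_group"]   # => "codec"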
metadata
ADDED
@@ -0,0 +1,85 @@
+--- !ruby/object:Gem::Specification
+name: logstash-codec-kafka_time_machine
+version: !ruby/object:Gem::Version
+  version: 0.1.0
+platform: ruby
+authors:
+- Chris Foster
+autorequire:
+bindir: bin
+cert_chain: []
+date: 2020-05-11 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  name: logstash-core-plugin-api
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '1.60'
+    - - "<="
+      - !ruby/object:Gem::Version
+        version: '2.99'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '1.60'
+    - - "<="
+      - !ruby/object:Gem::Version
+        version: '2.99'
+- !ruby/object:Gem::Dependency
+  name: logstash-devutils
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '0'
+description: This gem is a logstash plugin required to be installed on top of the
+  Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This
+  gem is not a stand-alone program
+email: chrifost@cisco.com
+executables: []
+extensions: []
+extra_rdoc_files: []
+files:
+- Gemfile
+- lib/logstash/codecs/kafkatimemachine.rb
+- logstash-codec-kafka_time_machine.gemspec
+homepage: http://www.elastic.co/guide/en/logstash/current/index.html
+licenses:
+- Apache-2.0
+metadata:
+  logstash_plugin: 'true'
+  logstash_group: codec
+post_install_message:
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubyforge_project:
+rubygems_version: 2.5.2.3
+signing_key:
+specification_version: 4
+summary: Calculate total time of logstash event that traversed 2 Kafka queues from
+  a producer site to an aggregate site
+test_files: []