logstash-codec-kafka_time_machine 0.1.1 → 0.1.2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5ee3fa202b98f0398d64cad83c1cf8750fba7ab3
+  data.tar.gz: 1a51b4b13aaffce958a3d415bb4390bb670d3b50
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7bed4e8fef9a11732f3b5e8ec94542741374ce35b80838346d64989c4431575befb725719b3adb4f8f1227c32cd6ddd5fc6c7d69a704e70d41016bc4c5ea9e56
+  data.tar.gz: 97b3b72e386c3f75ee93fee7a5217ca459ee6cd53cedd8ecd8a10821d33c099afd0d6e2c66e08ec04fe45d3583b283d6fb93871ceb245b8073dd9c6a7f296bb1
@@ -43,7 +43,7 @@ class LogStash::Codecs::KafkaTimeMachine < LogStash::Codecs::Base
   public
   def encode(event)
 
-    # Extract producer data and check for validity
+    # Extract producer data and check for validity; note that kafka_datacenter_producer is used for both producer and aggregate arrays
     kafka_datacenter_producer = event.get("[@metadata][kafka_datacenter_producer]")
     kafka_topic_producer = event.get("[@metadata][kafka_topic_producer]")
     kafka_consumer_group_producer = event.get("[@metadata][kafka_consumer_group_producer]")
@@ -66,13 +66,12 @@ class LogStash::Codecs::KafkaTimeMachine < LogStash::Codecs::Base
     end
 
     # Extract aggregate data and check for validity
-    kafka_datacenter_aggregate = event.get("[@metadata][kafka_datacenter_aggregate]")
     kafka_topic_aggregate = event.get("[@metadata][kafka_topic_aggregate]")
     kafka_consumer_group_aggregate = event.get("[@metadata][kafka_consumer_group_aggregate]")
     kafka_append_time_aggregate = Float(event.get("[@metadata][kafka_append_time_aggregate]")) rescue nil
     logstash_kafka_read_time_aggregate = Float(event.get("[@metadata][logstash_kafka_read_time_aggregate]")) rescue nil
 
-    kafka_aggregate_array = Array[
+    kafka_aggregate_array = Array[kafka_datacenter_producer, kafka_topic_aggregate, kafka_consumer_group_aggregate, kafka_append_time_aggregate, logstash_kafka_read_time_aggregate]
     @logger.debug("kafka_aggregate_array: #{kafka_aggregate_array}")
 
     if (kafka_aggregate_array.any? { |text| text.nil? || text.to_s.empty? })
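The if-guard at the end of the hunk above treats any nil or empty-string element as invalid, which is presumably how the aggregate_valid flag used further down gets set. A minimal Ruby sketch of that check in isolation, with made-up values:

    # Hypothetical sample: datacenter, topic, consumer group, append time, read time.
    # The consumer-group slot is nil, so the whole aggregate array is treated as invalid.
    sample_array = ["dc1", "app_logs", nil, 1589328000000.0, 1589328000500.0]
    invalid = sample_array.any? { |text| text.nil? || text.to_s.empty? }
    # invalid => true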
@@ -96,7 +95,7 @@ class LogStash::Codecs::KafkaTimeMachine < LogStash::Codecs::Base
     elsif (producer_valid == true && aggregate_valid == false)
       influx_line_protocol = "kafka_lag_time,meta_source=lma,meta_type=ktm,meta_datacenter=#{kafka_datacenter_producer},ktm_lag_type=producer,kafka_topic_producer=#{kafka_topic_producer},kafka_consumer_group_producer=#{kafka_consumer_group_producer} kafka_producer_lag_ms=#{kafka_producer_lag_ms} #{kafka_logstash_influx_metric_time}"
     elsif (aggregate_valid == true && producer_valid == false)
-      influx_line_protocol = "kafka_lag_time,meta_source=lma,meta_type=ktm,meta_datacenter=#{
+      influx_line_protocol = "kafka_lag_time,meta_source=lma,meta_type=ktm,meta_datacenter=#{kafka_datacenter_producer},ktm_lag_type=aggregate,kafka_topic_aggregate=#{kafka_topic_aggregate},kafka_consumer_group_aggregate=#{kafka_consumer_group_aggregate} kafka_aggregate_lag_ms=#{kafka_aggregate_lag_ms} #{kafka_logstash_influx_metric_time}"
     elsif (aggregate_valid == false && producer_valid == false)
       @logger.error("Error kafkatimemachine: Could not build valid response --> #{error_string_producer}, #{error_string_aggregate}")
       influx_line_protocol = nil
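For reference, each of the valid branches above builds a single InfluxDB line-protocol string: the kafka_lag_time measurement, a comma-separated tag set, one lag field, and the metric timestamp. Substituting made-up values into the aggregate branch (datacenter dc1, topic app_logs, consumer group ktm_agg, 42 ms of lag, timestamp 1589328000000) would render roughly:

    kafka_lag_time,meta_source=lma,meta_type=ktm,meta_datacenter=dc1,ktm_lag_type=aggregate,kafka_topic_aggregate=app_logs,kafka_consumer_group_aggregate=ktm_agg kafka_aggregate_lag_ms=42 1589328000000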
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-codec-kafka_time_machine'
-  s.version = '0.1.1'
+  s.version = '0.1.2'
   s.licenses = ['Apache-2.0']
   s.summary = "Calculate total time of logstash event that traversed 2 Kafka queues from a producer site to an aggregate site"
   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
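Per the description above, the plugin is installed on top of an existing Logstash deployment rather than run standalone; substituting this gem's name into the documented command (assuming $LS_HOME points at the Logstash install) gives:

    $LS_HOME/bin/logstash-plugin install logstash-codec-kafka_time_machine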
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-codec-kafka_time_machine
 version: !ruby/object:Gem::Version
-  version: 0.1.1
+  version: 0.1.2
 platform: ruby
 authors:
 - Chris Foster
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-05-
+date: 2020-05-13 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: logstash-core-plugin-api