logstash-filter-kafka_time_machine 2.0.0 → 2.0.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: a0ab8be37285b43b8785aea650cc6eb6d456d701ba36aeb0d5c2cbcb020d470d
-  data.tar.gz: d6ccd08fa8024d7cced6c529708ad5cd5f15ef683c043206966c58b989d28f42
+  metadata.gz: dcb429711e99220eb57f095154d68f2b3f477956587762c4ba03ef7bc434ba30
+  data.tar.gz: 3c66923274a218187bef908747091baa540523729c6d961f02ab002eb6701fce
 SHA512:
-  metadata.gz: 73b806dbef6c52765e674dc7acec647c90427ff40270c9665eaf07c29818ecca558c564cb812aa9cd81601a9b3009ccdd5b7d465ef18352a4f146c195f9daf16
-  data.tar.gz: 3f318fbafd7bd1283599ac68b4d7b29ea9187f7772d8353235d0d4d2cc06be34afc2bf440d226600ba125ad4a9a34d058d77f05c87c94c45d9f3339cbdc79e31
+  metadata.gz: 840b0fbdef1e7096c2e51cdaac3a4ef38e5e7830fb8263e8511d8e09116a13a92a90dcf13ffb72ffef51da4f66aaf898235d30ee50c6f7adddab3c79a428bb80
+  data.tar.gz: 6ffbb0731c74f2b7168dccd79f4a78f00b168baab2d5d111ac9a872b39c24f3aca8fa5b92e67053a7f5abef71a0fc44561604f56f669c40812a10c237e5b65e9
lib/logstash/filters/kafka_time_machine.rb CHANGED
@@ -41,6 +41,12 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
   # Current time since EPOCH in ms that should be set in the influxdb generated metric
   config :event_time_ms, :validate => :string, :required => true
 
+  # Elasticsearch cluster the event is destined for; set as a tag on the influxdb generated metric
+  config :elasticsearch_cluster, :validate => :string, :required => true
+
+  # Elasticsearch index the event is destined for; set as a tag on the influxdb generated metric
+  config :elasticsearch_cluster_index, :validate => :string, :required => true
+
   public
   def register
 
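Both new options are :required => true, so pipelines upgrading from 2.0.0 must add them to the kafka_time_machine filter block, typically as field references (for example elasticsearch_cluster => "%{es_cluster}", where the es_cluster field name is hypothetical). A minimal Ruby sketch of how such values resolve at run time via event.sprintf, the standard Logstash interpolation call this filter uses; it assumes logstash-core is on the load path:

require "logstash/event"

# Hypothetical event carrying the fields the new options would reference
event = LogStash::Event.new("es_cluster" => "prod-es-1", "es_cluster_index" => "app-logs")

# The filter calls event.sprintf on every configured option, so each option
# accepts either a literal string or a %{field} reference
puts event.sprintf("%{es_cluster}")        # => prod-es-1
puts event.sprintf("%{es_cluster_index}")  # => app-logs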
@@ -61,6 +67,8 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
     shipper_kafka_consumer_group = event.sprintf(@kafka_consumer_group_shipper)
     indexer_kafka_topic = event.sprintf(@kafka_topic_indexer)
     indexer_kafka_consumer_group = event.sprintf(@kafka_consumer_group_indexer)
+    elasticsearch_cluster = event.sprintf(@elasticsearch_cluster)
+    elasticsearch_cluster_index = event.sprintf(@elasticsearch_cluster_index)
 
     # Extract all the "time" related values to local variables. These need special handling due to the Float() operation.
     #
@@ -72,10 +80,11 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
     indexer_logstash_kafka_read_time = get_numeric(event.sprintf(@logstash_kafka_read_time_indexer))
 
     # Validate the shipper data
-    shipper_kafka_array = Array[shipper_kafka_datacenter, shipper_kafka_topic, shipper_kafka_consumer_group, shipper_kafka_append_time, shipper_logstash_kafka_read_time, event_owner, event_time_ms]
+    shipper_kafka_array = Array[shipper_kafka_datacenter, shipper_kafka_topic, shipper_kafka_consumer_group, shipper_kafka_append_time, shipper_logstash_kafka_read_time, event_owner, event_time_ms, elasticsearch_cluster, elasticsearch_cluster_index]
     if (shipper_kafka_array.any? { |text| text.nil? || text.to_s.empty? })
       @logger.debug("shipper_kafka_array invalid: Found null")
       error_string_shipper = sprintf("Error in shipper data: %s", shipper_kafka_array)
+      @logger.debug(error_string_shipper)
       shipper_valid = false
     else
       @logger.debug("shipper_kafka_array valid")
@@ -86,10 +95,11 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
     end
 
     # Validate the indexer data
-    indexer_kafka_array = Array[shipper_kafka_datacenter, indexer_kafka_topic, indexer_kafka_consumer_group, indexer_kafka_append_time, indexer_logstash_kafka_read_time, event_owner, event_time_ms]
+    indexer_kafka_array = Array[shipper_kafka_datacenter, indexer_kafka_topic, indexer_kafka_consumer_group, indexer_kafka_append_time, indexer_logstash_kafka_read_time, event_owner, event_time_ms, elasticsearch_cluster, elasticsearch_cluster_index]
     if (indexer_kafka_array.any? { |text| text.nil? || text.to_s.empty? })
       @logger.debug("indexer_kafka_array invalid: Found null")
       error_string_indexer = sprintf("Error in indexer data: %s", indexer_kafka_array)
+      @logger.debug(error_string_indexer)
       indexer_valid = false
     else
       @logger.debug("indexer_kafka_array valid")
@@ -118,26 +128,26 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
     if (shipper_valid == true && indexer_valid == true && epoch_time_ns != nil)
       total_kafka_lag_ms = indexer_logstash_kafka_read_time - shipper_kafka_append_time
 
-      point_influxdb = create_influxdb_point_ktm(shipper_kafka_datacenter, event_owner, payload_bytesize, "total", total_kafka_lag_ms, epoch_time_ns)
+      point_influxdb = create_influxdb_point_ktm(shipper_kafka_datacenter, event_owner, payload_bytesize, "total", total_kafka_lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
       ktm_metric_event_array.push point_influxdb
 
     elsif (shipper_valid == true && indexer_valid == false && epoch_time_ns != nil)
-      point_influxdb = create_influxdb_point_ktm(shipper_kafka_datacenter, event_owner, payload_bytesize, "shipper", shipper_kafka_lag_ms, epoch_time_ns)
+      point_influxdb = create_influxdb_point_ktm(shipper_kafka_datacenter, event_owner, payload_bytesize, "shipper", shipper_kafka_lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
       ktm_metric_event_array.push point_influxdb
 
-      point_influxdb = create_influxdb_point_ktm_error(shipper_kafka_datacenter, event_owner, epoch_time_ns, "indexer")
+      point_influxdb = create_influxdb_point_ktm_error(shipper_kafka_datacenter, event_owner, epoch_time_ns, "indexer", elasticsearch_cluster, elasticsearch_cluster_index)
       ktm_metric_event_array.push point_influxdb
 
     elsif (indexer_valid == true && shipper_valid == false && epoch_time_ns != nil)
-      point_influxdb = create_influxdb_point_ktm(shipper_kafka_datacenter, event_owner, payload_bytesize, "indexer", indexer_kafka_lag_ms, epoch_time_ns)
+      point_influxdb = create_influxdb_point_ktm(shipper_kafka_datacenter, event_owner, payload_bytesize, "indexer", indexer_kafka_lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
       ktm_metric_event_array.push point_influxdb
 
-      point_influxdb = create_influxdb_point_ktm_error(shipper_kafka_datacenter, event_owner, epoch_time_ns, "shipper")
+      point_influxdb = create_influxdb_point_ktm_error(shipper_kafka_datacenter, event_owner, epoch_time_ns, "shipper", elasticsearch_cluster, elasticsearch_cluster_index)
       ktm_metric_event_array.push point_influxdb
 
     elsif (indexer_valid == false && shipper_valid == false)
 
-      point_influxdb = create_influxdb_point_ktm_error(shipper_kafka_datacenter, event_owner, epoch_time_ns, "insufficient_data")
+      point_influxdb = create_influxdb_point_ktm_error(shipper_kafka_datacenter, event_owner, epoch_time_ns, "insufficient_data", elasticsearch_cluster, elasticsearch_cluster_index)
       ktm_metric_event_array.push point_influxdb
 
       error_string = sprintf("Error kafka_time_machine: Could not build valid response --> %s, %s", error_string_shipper, error_string_indexer)
@@ -145,7 +155,7 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
 
     else
 
-      point_influxdb = create_influxdb_point_ktm_error(shipper_kafka_datacenter, event_owner, epoch_time_ns, "unknown")
+      point_influxdb = create_influxdb_point_ktm_error(shipper_kafka_datacenter, event_owner, epoch_time_ns, "unknown", elasticsearch_cluster, elasticsearch_cluster_index)
       ktm_metric_event_array.push point_influxdb
 
       error_string = "Unknown error encountered"
@@ -171,10 +181,10 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
 
   # Creates an Influx DB line-protocol data point to return
   public
-  def create_influxdb_point_ktm(datacenter, event_owner, payload_size_bytes, lag_type, lag_ms, epoch_time_ns)
+  def create_influxdb_point_ktm(datacenter, event_owner, payload_size_bytes, lag_type, lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
 
     point = InfluxDB2::Point.new( name: "ktm",
-                                  tags: {datacenter: datacenter, owner: event_owner, lag_type: lag_type},
+                                  tags: {datacenter: datacenter, owner: event_owner, lag_type: lag_type, es_cluster: elasticsearch_cluster, es_cluster_index: elasticsearch_cluster_index},
                                   fields: {payload_size_bytes: payload_size_bytes, lag_ms: lag_ms},
                                   time: epoch_time_ns)
 
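With the two extra parameters, every ktm point now carries es_cluster and es_cluster_index as InfluxDB tags, which makes lag queryable per destination cluster and index. A sketch of the construction above with hypothetical tag and field values, using the influxdb-client gem that provides InfluxDB2::Point:

require "influxdb-client"

point = InfluxDB2::Point.new(name: "ktm",
                             tags: {datacenter: "dc1", owner: "team-logging", lag_type: "total",
                                    es_cluster: "prod-es-1", es_cluster_index: "app-logs"},
                             fields: {payload_size_bytes: 2048, lag_ms: 1500},
                             time: 1_635_000_000_000_000_000)

# Serializes to a single "ktm" measurement whose tag set now includes
# es_cluster and es_cluster_index alongside datacenter, owner, and lag_type
puts point.to_line_protocol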
@@ -185,7 +195,7 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
 
   # Creates an Influx DB line-protocol data point to return
   public
-  def create_influxdb_point_ktm_error(datacenter, event_owner, epoch_time_ns, type)
+  def create_influxdb_point_ktm_error(datacenter, event_owner, epoch_time_ns, type, elasticsearch_cluster, elasticsearch_cluster_index)
 
     # Check for nil values
     if (nil == datacenter)
@@ -202,7 +212,7 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
     end
 
     point = InfluxDB2::Point.new( name: "ktm_error",
-                                  tags: {datacenter: datacenter, owner: event_owner, source: type},
+                                  tags: {datacenter: datacenter, owner: event_owner, source: type, es_cluster: elasticsearch_cluster, es_cluster_index: elasticsearch_cluster_index},
                                   fields: {count: 1},
                                   time: epoch_time_ns)
 
logstash-filter-kafka_time_machine.gemspec CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-filter-kafka_time_machine'
-  s.version = '2.0.0'
+  s.version = '2.0.1'
   s.licenses = ['Apache-2.0']
   s.summary = "Calculate total time of logstash event that traversed 2 Kafka queues from a shipper site to an indexer site"
   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-filter-kafka_time_machine
 version: !ruby/object:Gem::Version
-  version: 2.0.0
+  version: 2.0.1
 platform: ruby
 authors:
 - Chris Foster
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-10-21 00:00:00.000000000 Z
+date: 2021-10-28 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: logstash-core-plugin-api