logstash-filter-kafka_time_machine 3.0.2.pre → 3.0.2

This diff shows the changes between publicly released versions of the package, as published to their respective public registries. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: f0493db2db2981e58eb360408a6ca29529f267e3b2f3206275329bc8c05ac473
- data.tar.gz: 02266346ba816a9fef2b12f4aba8bc283847fa7f55ebbaae507ca107798458f8
+ metadata.gz: a70e3da32df44cecce8132e66ea3a069f1b9b62c55960567b8d349cb731debff
+ data.tar.gz: 7bffdb48f24f3ded475464bbc7146d32eab8797b8b0490334f19f485067f7781
  SHA512:
- metadata.gz: bea392a66af7f726c13d953ca1f03bfaebfe0782c5df11944cb8b8db845650f5619e0f1d8dab31e22de50372929dd1c68288678bcf5d91931688593887249bec
- data.tar.gz: 48ba960f4285a64b5c5f81be76e0380c39581e9adcdfa62c46062e7b77ff6b4d422524a4cafeeebae43d5c17511ef665d84e3a842a6e0919c4ad031a1ec94624
+ metadata.gz: f195f4368df2939e9cbb3e7185ba638aebdd2f9374fb0823b017d603b8a242c7bf4b5b7206cf4bcc4adc257f1fefe11d928edfba85f74ab1514981511e6cf670
+ data.tar.gz: '0389d1c446d9e27f124cc5da4f9fc2eae296216dbe4b2702a45e04b3a6bfa6c1d1f39ca1fbb73bcdfc14268f064331f8f3af5d1cb030ce8c94fc69b64be89f5a'
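
For context, checksums.yaml inside a .gem records the SHA256 and SHA512 digests of the archive's two members, metadata.gz and data.tar.gz, so both pairs rotate on any republish. A minimal verification sketch in Ruby (the local filename is an assumption; a .gem file is a plain tar archive):

require "digest"
require "rubygems/package"

# Digest for data.tar.gz from the 3.0.2 checksums.yaml above.
expected = "7bffdb48f24f3ded475464bbc7146d32eab8797b8b0490334f19f485067f7781"

File.open("logstash-filter-kafka_time_machine-3.0.2.gem", "rb") do |io|
  Gem::Package::TarReader.new(io).each do |entry|
    next unless entry.full_name == "data.tar.gz"
    puts Digest::SHA256.hexdigest(entry.read) == expected ? "checksum ok" : "MISMATCH"
  end
end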
lib/logstash/filters/kafka_time_machine.rb CHANGED
@@ -38,9 +38,6 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
  # Owner of the event currenty being process.
  config :event_owner, :validate => :string, :required => true

- # tag of the event currenty being process.
- config :event_tag, :validate => :string, :required => true
-
  # Current time since EPOCH in ms that should be set in the generated metric
  config :event_time_ms, :validate => :string, :required => true

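Note that this removal is a breaking change for existing pipelines: event_tag was :required => true in 3.0.2.pre, so every existing configuration sets it, and a filter block that still sets it will fail configuration validation on 3.0.2 as an unknown setting.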
@@ -82,11 +79,8 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
  indexer_kafka_append_time = get_numeric(event.sprintf(@kafka_append_time_indexer))
  indexer_logstash_kafka_read_time = get_numeric(event.sprintf(@logstash_kafka_read_time_indexer))

- # Extract tags
- event_tag = event.sprintf(@event_tag)
-
  # Validate the shipper data
- shipper_kafka_array = Array[shipper_kafka_datacenter, shipper_kafka_topic, shipper_kafka_consumer_group, shipper_kafka_append_time, shipper_logstash_kafka_read_time, event_owner, event_tag, event_time_ms, elasticsearch_cluster, elasticsearch_cluster_index]
+ shipper_kafka_array = Array[shipper_kafka_datacenter, shipper_kafka_topic, shipper_kafka_consumer_group, shipper_kafka_append_time, shipper_logstash_kafka_read_time, event_owner, event_time_ms, elasticsearch_cluster, elasticsearch_cluster_index]
  if (shipper_kafka_array.any? { |text| text.nil? || text.to_s.empty? })
  @logger.debug("shipper_kafka_array invalid: Found null")
  error_string_shipper = sprintf("Error in shipper data: %s", shipper_kafka_array)
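
The presence check above is the heart of the validation: a leg of the journey only produces a lag metric when every field in its array is non-nil and non-empty. With event_tag dropped from the array, a missing tag no longer invalidates an otherwise complete event. A standalone sketch of the same pattern, with assumed sample values:

# Same check as above, isolated, with assumed sample values.
shipper_kafka_array = ["dc1", "app-logs", "cg-shipper", 1741651200000,
                       1741651200500, "team-logging", "1741651201000",
                       "es-prod", "logs-2025.03"]

shipper_valid = !shipper_kafka_array.any? { |text| text.nil? || text.to_s.empty? }
puts shipper_valid # => true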
@@ -101,7 +95,7 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
  end

  # Validate the indexer data
- indexer_kafka_array = Array[shipper_kafka_datacenter, indexer_kafka_topic, indexer_kafka_consumer_group, indexer_kafka_append_time, indexer_logstash_kafka_read_time, event_owner, event_tag, event_time_ms, elasticsearch_cluster, elasticsearch_cluster_index]
+ indexer_kafka_array = Array[shipper_kafka_datacenter, indexer_kafka_topic, indexer_kafka_consumer_group, indexer_kafka_append_time, indexer_logstash_kafka_read_time, event_owner, event_time_ms, elasticsearch_cluster, elasticsearch_cluster_index]
  if (indexer_kafka_array.any? { |text| text.nil? || text.to_s.empty? })
  @logger.debug("indexer_kafka_array invalid: Found null")
  error_string_indexer = sprintf("Error in indexer data: %s", indexer_kafka_array)
@@ -142,26 +136,26 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
  if (shipper_valid == true && indexer_valid == true && epoch_time_ns != nil)
  total_kafka_lag_ms = indexer_logstash_kafka_read_time - shipper_kafka_append_time

- point_ktm = create_point_ktm(shipper_kafka_datacenter, event_owner, event_tag, payload_size_bytes, "total", total_kafka_lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
+ point_ktm = create_point_ktm(shipper_kafka_datacenter, event_owner, payload_size_bytes, "total", total_kafka_lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
  ktm_metric_event_array.push point_ktm

  elsif (shipper_valid == true && indexer_valid == false && epoch_time_ns != nil)
- point_ktm = create_point_ktm(shipper_kafka_datacenter, event_owner, event_tag, payload_size_bytes, "shipper", shipper_kafka_lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
+ point_ktm = create_point_ktm(shipper_kafka_datacenter, event_owner, payload_size_bytes, "shipper", shipper_kafka_lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
  ktm_metric_event_array.push point_ktm

- point_ktm = create_point_ktm_error(shipper_kafka_datacenter, event_owner, event_tag, epoch_time_ns, "indexer", elasticsearch_cluster, elasticsearch_cluster_index)
+ point_ktm = create_point_ktm_error(shipper_kafka_datacenter, event_owner, epoch_time_ns, "indexer", elasticsearch_cluster, elasticsearch_cluster_index)
  ktm_metric_event_array.push point_ktm

  elsif (indexer_valid == true && shipper_valid == false && epoch_time_ns != nil)
- point_ktm = create_point_ktm(shipper_kafka_datacenter, event_owner, event_tag, payload_size_bytes, "indexer", indexer_kafka_lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
+ point_ktm = create_point_ktm(shipper_kafka_datacenter, event_owner, payload_size_bytes, "indexer", indexer_kafka_lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
  ktm_metric_event_array.push point_ktm

- point_ktm = create_point_ktm_error(shipper_kafka_datacenter, event_owner, event_tag, epoch_time_ns, "shipper", elasticsearch_cluster, elasticsearch_cluster_index)
+ point_ktm = create_point_ktm_error(shipper_kafka_datacenter, event_owner, epoch_time_ns, "shipper", elasticsearch_cluster, elasticsearch_cluster_index)
  ktm_metric_event_array.push point_ktm

  elsif (indexer_valid == false && shipper_valid == false)

- point_ktm = create_point_ktm_error(shipper_kafka_datacenter, event_owner, event_tag, epoch_time_ns, "insufficient_data", elasticsearch_cluster, elasticsearch_cluster_index)
+ point_ktm = create_point_ktm_error(shipper_kafka_datacenter, event_owner, epoch_time_ns, "insufficient_data", elasticsearch_cluster, elasticsearch_cluster_index)
  ktm_metric_event_array.push point_ktm

  error_string = sprintf("Error kafka_time_machine: Could not build valid response --> %s, %s", error_string_shipper, error_string_indexer)
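
Each branch above differs only in which create_point_ktm argument list it builds; the lag arithmetic itself is untouched by this release. For reference, every lag figure is a difference of two epoch-millisecond timestamps (values below assumed):

# Assumed epoch-millisecond timestamps for illustration.
shipper_kafka_append_time        = 1741651200000 # written to shipper-site Kafka
indexer_logstash_kafka_read_time = 1741651203500 # read from indexer-site Kafka

total_kafka_lag_ms = indexer_logstash_kafka_read_time - shipper_kafka_append_time
puts total_kafka_lag_ms # => 3500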
@@ -169,7 +163,7 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base

  else

- point_ktm = create_point_ktm_error(shipper_kafka_datacenter, event_owner, event_tag, epoch_time_ns, "unknown", elasticsearch_cluster, elasticsearch_cluster_index)
+ point_ktm = create_point_ktm_error(shipper_kafka_datacenter, event_owner, epoch_time_ns, "unknown", elasticsearch_cluster, elasticsearch_cluster_index)
  ktm_metric_event_array.push point_ktm

  error_string = "Unknown error encountered"
@@ -193,7 +187,7 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base

  # Creates hash with ktm data point to return
  public
- def create_point_ktm(datacenter, event_owner, event_tag, payload_size_bytes, lag_type, lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
+ def create_point_ktm(datacenter, event_owner, payload_size_bytes, lag_type, lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)

  point = Hash.new

@@ -204,7 +198,6 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
  # tags
  point["datacenter"] = datacenter
  point["owner"] = event_owner
- point["tag"] = event_tag
  point["lag_type"] = lag_type
  point["es_cluster"] = elasticsearch_cluster
  point["es_cluster_index"] = elasticsearch_cluster_index
@@ -219,7 +212,7 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base

  # Creates hash with ktm data point to return
  public
- def create_point_ktm_error(datacenter, event_owner, event_tag, epoch_time_ns, type, elasticsearch_cluster, elasticsearch_cluster_index)
+ def create_point_ktm_error(datacenter, event_owner, epoch_time_ns, type, elasticsearch_cluster, elasticsearch_cluster_index)

  # Check for nil values
  if (nil == datacenter)
@@ -230,10 +223,6 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
  event_owner = "unknown"
  end

- if (nil == event_tag)
- event_tag = "unknown"
- end
-
  # set time if we didn't recieve it
  if (nil == epoch_time_ns)
  epoch_time_ns = ((Time.now.to_f * 1000).to_i)*1000000
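
The fallback timestamp, unchanged in this release, is worth a gloss: it truncates the current wall clock to whole milliseconds, then scales to nanoseconds, so the resulting value is always millisecond-aligned:

# Equivalent to the fallback above, split into its two steps.
now_ms        = (Time.now.to_f * 1000).to_i # e.g. 1741651200123
epoch_time_ns = now_ms * 1000000            # e.g. 1741651200123000000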
@@ -248,7 +237,6 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
  # tags
  point["datacenter"] = datacenter
  point["owner"] = event_owner
- point["tag"] = event_tag
  point["source"] = type
  point["es_cluster"] = elasticsearch_cluster
  point["es_cluster_index"] = elasticsearch_cluster_index
@@ -288,4 +276,4 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base

  end # def get_numberic

- end # class LogStash::Filters::KafkaTimeMachine
+ end # class LogStash::Filters::KafkaTimeMachine
logstash-filter-kafka_time_machine.gemspec CHANGED
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
  s.name = 'logstash-filter-kafka_time_machine'
- s.version = '3.0.2.pre'
+ s.version = '3.0.2'
  s.licenses = ['Apache-2.0']
  s.summary = "Calculate total time of logstash event that traversed 2 Kafka queues from a shipper site to an indexer site"
  s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
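
Dropping the .pre suffix is the substantive change here: RubyGems treats any version segment containing letters as a prerelease, which sorts before the final release and is skipped by gem install unless --pre is given. Both properties can be checked directly:

require "rubygems"

puts Gem::Version.new("3.0.2.pre").prerelease?                 # => true
puts Gem::Version.new("3.0.2.pre") < Gem::Version.new("3.0.2") # => true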
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-filter-kafka_time_machine
  version: !ruby/object:Gem::Version
- version: 3.0.2.pre
+ version: 3.0.2
  platform: ruby
  authors:
  - Chris Foster
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2025-01-30 00:00:00.000000000 Z
+ date: 2025-03-11 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: logstash-core-plugin-api
@@ -73,9 +73,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
  version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
- - - ">"
+ - - ">="
  - !ruby/object:Gem::Version
- version: 1.3.1
+ version: '0'
  requirements: []
  rubygems_version: 3.0.3.1
  signing_key:
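
The required_rubygems_version change falls out of the same version bump: when a gem's version is a prerelease, RubyGems stamps the metadata with '> 1.3.1' (restricting installs to clients new enough to understand prereleases), while a normal release keeps the default '>= 0', which any client satisfies:

require "rubygems"

# The release gem's constraint is satisfiable by any RubyGems version:
puts Gem::Requirement.new(">= 0").satisfied_by?(Gem::Version.new("3.0.3.1")) # => true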