logstash-filter-kafka_time_machine 3.0.1 → 3.0.2.pre

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 61eed3c10584ae85f6fed99b68b4a217e628d37a61471cf6d36fe3a1abcbf74d
-  data.tar.gz: 9e73985ed0bc52b621c9fc68539c8eac0bc9f6fadfa66f429ec8011d86c67891
+  metadata.gz: f0493db2db2981e58eb360408a6ca29529f267e3b2f3206275329bc8c05ac473
+  data.tar.gz: 02266346ba816a9fef2b12f4aba8bc283847fa7f55ebbaae507ca107798458f8
 SHA512:
-  metadata.gz: b8e9551aa515878242d9528048ba731e6a1c59bc4bbadffd6f916a736685dd6749cdeed9cad1b55498eb1519366e8c07acf7eeb2e74ccf2f71686fa08835c456
-  data.tar.gz: 332d6d9f0756db77f0176337a370430cb622e6968e20f470727cd3d6f9979509b54472f398701b500651f0f6f30dcdeb4e0ff54db65d34c0c687c76fc661b307
+  metadata.gz: bea392a66af7f726c13d953ca1f03bfaebfe0782c5df11944cb8b8db845650f5619e0f1d8dab31e22de50372929dd1c68288678bcf5d91931688593887249bec
+  data.tar.gz: 48ba960f4285a64b5c5f81be76e0380c39581e9adcdfa62c46062e7b77ff6b4d422524a4cafeeebae43d5c17511ef665d84e3a842a6e0919c4ad031a1ec94624
@@ -38,6 +38,9 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
   # Owner of the event currently being processed.
   config :event_owner, :validate => :string, :required => true
 
+  # Tag of the event currently being processed.
+  config :event_tag, :validate => :string, :required => true
+
   # Current time since EPOCH in ms that should be set in the generated metric
   config :event_time_ms, :validate => :string, :required => true
 
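Note: event_tag is declared with :required => true, so this is a breaking change for existing pipelines: upgrading without setting it fails configuration validation. A minimal sketch of what a pipeline now has to supply (the "%{[...]}" field references are invented, and only the settings visible in this diff are shown; the plugin declares others):

  # Hypothetical params hash for the filter; field references are made up.
  params = {
    "event_owner"   => "%{[owner]}",
    "event_time_ms" => "%{[time_ms]}",
    "event_tag"     => "%{[kafka][tag]}"  # new in 3.0.2.pre; required
  }
  # Omitting "event_tag" now raises a configuration error when Logstash
  # initializes the plugin with these params.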
@@ -79,8 +82,11 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
     indexer_kafka_append_time = get_numeric(event.sprintf(@kafka_append_time_indexer))
     indexer_logstash_kafka_read_time = get_numeric(event.sprintf(@logstash_kafka_read_time_indexer))
 
+    # Extract tags
+    event_tag = event.sprintf(@event_tag)
+
     # Validate the shipper data
-    shipper_kafka_array = Array[shipper_kafka_datacenter, shipper_kafka_topic, shipper_kafka_consumer_group, shipper_kafka_append_time, shipper_logstash_kafka_read_time, event_owner, event_time_ms, elasticsearch_cluster, elasticsearch_cluster_index]
+    shipper_kafka_array = Array[shipper_kafka_datacenter, shipper_kafka_topic, shipper_kafka_consumer_group, shipper_kafka_append_time, shipper_logstash_kafka_read_time, event_owner, event_tag, event_time_ms, elasticsearch_cluster, elasticsearch_cluster_index]
     if (shipper_kafka_array.any? { |text| text.nil? || text.to_s.empty? })
       @logger.debug("shipper_kafka_array invalid: Found null")
       error_string_shipper = sprintf("Error in shipper data: %s", shipper_kafka_array)
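The new event.sprintf(@event_tag) call resolves the configured string against the event using standard Logstash field-reference interpolation. Roughly (the field name here is invented):

  require "logstash/event"

  event = LogStash::Event.new("kafka_tag" => "payments")
  event.sprintf("%{[kafka_tag]}")  # => "payments" (field reference resolved)
  event.sprintf("payments")        # => "payments" (plain literals pass through)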
@@ -95,7 +101,7 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
     end
 
     # Validate the indexer data
-    indexer_kafka_array = Array[shipper_kafka_datacenter, indexer_kafka_topic, indexer_kafka_consumer_group, indexer_kafka_append_time, indexer_logstash_kafka_read_time, event_owner, event_time_ms, elasticsearch_cluster, elasticsearch_cluster_index]
+    indexer_kafka_array = Array[shipper_kafka_datacenter, indexer_kafka_topic, indexer_kafka_consumer_group, indexer_kafka_append_time, indexer_logstash_kafka_read_time, event_owner, event_tag, event_time_ms, elasticsearch_cluster, elasticsearch_cluster_index]
     if (indexer_kafka_array.any? { |text| text.nil? || text.to_s.empty? })
       @logger.debug("indexer_kafka_array invalid: Found null")
       error_string_indexer = sprintf("Error in indexer data: %s", indexer_kafka_array)
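Both arrays now carry event_tag, so the guard treats a missing or empty tag like any other missing field and marks the whole shipper or indexer data set invalid. The predicate in isolation:

  ["dc1", "topic-a", nil].any?   { |text| text.nil? || text.to_s.empty? }  # => true
  ["dc1", "topic-a", ""].any?    { |text| text.nil? || text.to_s.empty? }  # => true
  ["dc1", "topic-a", "cg1"].any? { |text| text.nil? || text.to_s.empty? }  # => false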
@@ -136,26 +142,26 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
     if (shipper_valid == true && indexer_valid == true && epoch_time_ns != nil)
       total_kafka_lag_ms = indexer_logstash_kafka_read_time - shipper_kafka_append_time
 
-      point_ktm = create_point_ktm(shipper_kafka_datacenter, event_owner, payload_size_bytes, "total", total_kafka_lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
+      point_ktm = create_point_ktm(shipper_kafka_datacenter, event_owner, event_tag, payload_size_bytes, "total", total_kafka_lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
       ktm_metric_event_array.push point_ktm
 
     elsif (shipper_valid == true && indexer_valid == false && epoch_time_ns != nil)
-      point_ktm = create_point_ktm(shipper_kafka_datacenter, event_owner, payload_size_bytes, "shipper", shipper_kafka_lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
+      point_ktm = create_point_ktm(shipper_kafka_datacenter, event_owner, event_tag, payload_size_bytes, "shipper", shipper_kafka_lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
       ktm_metric_event_array.push point_ktm
 
-      point_ktm = create_point_ktm_error(shipper_kafka_datacenter, event_owner, epoch_time_ns, "indexer", elasticsearch_cluster, elasticsearch_cluster_index)
+      point_ktm = create_point_ktm_error(shipper_kafka_datacenter, event_owner, event_tag, epoch_time_ns, "indexer", elasticsearch_cluster, elasticsearch_cluster_index)
       ktm_metric_event_array.push point_ktm
 
     elsif (indexer_valid == true && shipper_valid == false && epoch_time_ns != nil)
-      point_ktm = create_point_ktm(shipper_kafka_datacenter, event_owner, payload_size_bytes, "indexer", indexer_kafka_lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
+      point_ktm = create_point_ktm(shipper_kafka_datacenter, event_owner, event_tag, payload_size_bytes, "indexer", indexer_kafka_lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
       ktm_metric_event_array.push point_ktm
 
-      point_ktm = create_point_ktm_error(shipper_kafka_datacenter, event_owner, epoch_time_ns, "shipper", elasticsearch_cluster, elasticsearch_cluster_index)
+      point_ktm = create_point_ktm_error(shipper_kafka_datacenter, event_owner, event_tag, epoch_time_ns, "shipper", elasticsearch_cluster, elasticsearch_cluster_index)
       ktm_metric_event_array.push point_ktm
 
     elsif (indexer_valid == false && shipper_valid == false)
 
-      point_ktm = create_point_ktm_error(shipper_kafka_datacenter, event_owner, epoch_time_ns, "insufficient_data", elasticsearch_cluster, elasticsearch_cluster_index)
+      point_ktm = create_point_ktm_error(shipper_kafka_datacenter, event_owner, event_tag, epoch_time_ns, "insufficient_data", elasticsearch_cluster, elasticsearch_cluster_index)
       ktm_metric_event_array.push point_ktm
 
       error_string = sprintf("Error kafka_time_machine: Could not build valid response --> %s, %s", error_string_shipper, error_string_indexer)
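For reference, the lag math in the branches above, with invented timestamps in ms since epoch:

  shipper_kafka_append_time        = 1_738_195_200_000  # event appended at shipper
  indexer_logstash_kafka_read_time = 1_738_195_203_500  # event read at indexer
  indexer_logstash_kafka_read_time - shipper_kafka_append_time  # => 3500 ms total lag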
@@ -163,7 +169,7 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
 
     else
 
-      point_ktm = create_point_ktm_error(shipper_kafka_datacenter, event_owner, epoch_time_ns, "unknown", elasticsearch_cluster, elasticsearch_cluster_index)
+      point_ktm = create_point_ktm_error(shipper_kafka_datacenter, event_owner, event_tag, epoch_time_ns, "unknown", elasticsearch_cluster, elasticsearch_cluster_index)
       ktm_metric_event_array.push point_ktm
 
       error_string = "Unknown error encountered"
@@ -187,7 +193,7 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
 
   # Creates hash with ktm data point to return
   public
-  def create_point_ktm(datacenter, event_owner, payload_size_bytes, lag_type, lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
+  def create_point_ktm(datacenter, event_owner, event_tag, payload_size_bytes, lag_type, lag_ms, epoch_time_ns, elasticsearch_cluster, elasticsearch_cluster_index)
 
     point = Hash.new
 
@@ -198,6 +204,7 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
     # tags
     point["datacenter"] = datacenter
     point["owner"] = event_owner
+    point["tag"] = event_tag
     point["lag_type"] = lag_type
     point["es_cluster"] = elasticsearch_cluster
     point["es_cluster_index"] = elasticsearch_cluster_index
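Illustrative shape of the hash create_point_ktm now builds (values invented; only keys visible in this diff are shown):

  {
    "datacenter"       => "dc1",
    "owner"            => "team-payments",
    "tag"              => "checkout",   # the new dimension added in 3.0.2.pre
    "lag_type"         => "total",
    "es_cluster"       => "es-prod",
    "es_cluster_index" => "logs-write"
  }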
@@ -212,7 +219,7 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
 
   # Creates hash with ktm data point to return
   public
-  def create_point_ktm_error(datacenter, event_owner, epoch_time_ns, type, elasticsearch_cluster, elasticsearch_cluster_index)
+  def create_point_ktm_error(datacenter, event_owner, event_tag, epoch_time_ns, type, elasticsearch_cluster, elasticsearch_cluster_index)
 
     # Check for nil values
     if (nil == datacenter)
@@ -223,6 +230,10 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
       event_owner = "unknown"
     end
 
+    if (nil == event_tag)
+      event_tag = "unknown"
+    end
+
     # set time if we didn't receive it
     if (nil == epoch_time_ns)
       epoch_time_ns = ((Time.now.to_f * 1000).to_i)*1000000
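The fallback timestamp on the last line converts wall-clock time to nanoseconds at millisecond precision. Step by step, with an illustrative value:

  t  = Time.now.to_f    # e.g. 1738195200.1234 -- float seconds since epoch
  ms = (t * 1000).to_i  # 1738195200123       -- truncated to whole milliseconds
  ns = ms * 1000000     # 1738195200123000000 -- nanoseconds, ms precision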
@@ -237,6 +248,7 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
     # tags
     point["datacenter"] = datacenter
     point["owner"] = event_owner
+    point["tag"] = event_tag
     point["source"] = type
     point["es_cluster"] = elasticsearch_cluster
     point["es_cluster_index"] = elasticsearch_cluster_index
@@ -276,4 +288,4 @@ class LogStash::Filters::KafkaTimeMachine < LogStash::Filters::Base
 
   end # def get_numeric
 
-end # class LogStash::Filters::KafkaTimeMachine
+end # class LogStash::Filters::KafkaTimeMachine
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-filter-kafka_time_machine'
-  s.version = '3.0.1'
+  s.version = '3.0.2.pre'
   s.licenses = ['Apache-2.0']
   s.summary = "Calculate total time of logstash event that traversed 2 Kafka queues from a shipper site to an indexer site"
   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
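Note: the .pre suffix makes 3.0.2.pre a prerelease gem, which RubyGems skips during normal installs and dependency resolution. It has to be requested explicitly, e.g. with gem's --pre flag or, assuming the standard logstash-plugin CLI, bin/logstash-plugin install --version 3.0.2.pre logstash-filter-kafka_time_machine.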
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-filter-kafka_time_machine
3
3
  version: !ruby/object:Gem::Version
4
- version: 3.0.1
4
+ version: 3.0.2.pre
5
5
  platform: ruby
6
6
  authors:
7
7
  - Chris Foster
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2024-02-27 00:00:00.000000000 Z
11
+ date: 2025-01-30 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: logstash-core-plugin-api
@@ -73,9 +73,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
       version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">="
+  - - ">"
     - !ruby/object:Gem::Version
-      version: '0'
+      version: 1.3.1
 requirements: []
 rubygems_version: 3.0.3.1
 signing_key:
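The required_rubygems_version flip from ">= 0" to "> 1.3.1" is a side effect of the prerelease version rather than a hand-made edit: RubyGems stamps this constraint on any gem built with a prerelease version, because RubyGems releases older than 1.3.1 could not install prerelease gems. The prerelease semantics can be checked with the stock Gem::Version API:

  require "rubygems"

  Gem::Version.new("3.0.2.pre").prerelease?                  # => true
  Gem::Version.new("3.0.1").prerelease?                      # => false
  Gem::Version.new("3.0.2.pre") < Gem::Version.new("3.0.2")  # => true (sorts below the release)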