fluent-plugin-juniper-telemetry_tech-mocha 0.4.0 → 0.4.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 23a6189ba3255c16f3d40b3b7935d0cbeb1e25761fc61f2b8ead06430d25379a
- data.tar.gz: 16871c15cfd83777766c7badd596c9257758676420659b89a0033d8aab58ca36
+ metadata.gz: 568c3cbe11ba43bc6c06a4353d90b950b02364d6bb5488667337ed6e7947686a
+ data.tar.gz: 2a1ae1503e2728fd24eab9ef11f9be307e9ba5d1fa97b1977f896db20789adf4
  SHA512:
- metadata.gz: e0e77bf2c27b5df8a46fab27f240cfc137829eee14f599d3e8e79510838fc49d0b83509f3e98e1248efea51d7e62c2022f58ce2f48032d09773c03d135edfe1a
- data.tar.gz: ee1dd76f80feb7d1bd5544ceeb4ef14f7aeaf53315fc3a170fb0d6053851dd91eb36691b2df906140ce91c3150c15f73b4bc35004fdef402be7fbd7ae1f42576
+ metadata.gz: e34dea4bd929feddd940c3f1a1c63f89c93fd1189c9f4017a1a72ba2408132b27f068b9ea976ddaad5a4fa8c6e5ef2db39038de6cc301331976f1a0febb0244d
+ data.tar.gz: c41e668af4858c6ac5fd013c3722b4bdfb96ebf90f6f9861af42f1264a4022b05366a1877e802ee37e3b6e31589d10df6270ef28c851def9e9707cc21724db72
@@ -16,6 +16,9 @@ require 'port_exp.pb'
  require 'protobuf'
  require 'qmon.pb'
  require 'logical_port.pb'
+ require 'sr_stats_per_if_egress.pb'
+ require 'sr_stats_per_if_ingress.pb'
+ require 'sr_stats_per_sid.pb'
  require 'telemetry_top.pb'


@@ -99,6 +102,9 @@ module Fluent

  datas_sensors = jti_msg_json["enterprise"]["juniperNetworks"]
  $log.debug "Extracted the following sensor data from device '#{device_name}': #{datas_sensors}"
+
+ ## Uncomment for DEBUG ONLY!!
+ #$log.warn "Extracted the following sensor data from device '#{device_name}': #{datas_sensors}"
  rescue => e
  $log.warn "Unable to extract sensor data sensor from jti_msg.enterprise.juniperNetworks, Error during processing: #{$!}"
  $log.debug "Unable to extract sensor data sensor from jti_msg.enterprise.juniperNetworks, Data Dump : " + jti_msg.inspect.to_s
@@ -155,13 +161,6 @@ module Fluent
  elsif level_1_key == "counter_name"
  sensor_data.push({ 'counter_name' => level_1_value })
  else
- # By default, InfluxDB assigns the type of a field based on the type of the first value inserted.
- # So, in the "value" field, if an Integer is inserted, then the "value" field will only accept Integer
- # values hereon after ... so, a String value insertion will result in an error.
- # To alleviate this, we will have "value" as the default field for Integers, so as not to break existing code.
- # We will add additional "value_string", "value_float", fields to support different value types. This way,
- # we can persist all the various telemetry sensor parameters in InfluxDB, not just the Integer values.
-
  # Create local copy of 'sensor_data' variable.
  local_sensor_data = sensor_data.dup
  local_sensor_data = process_value(local_sensor_data, level_1_key, level_1_value, '')
@@ -183,6 +182,347 @@ module Fluent



+ #########################################################################
+ ## SENSOR: /junos/services/segment-routing/interface/egress/usage/ ##
+ #########################################################################
+ elsif sensor == "jnpr_sr_stats_per_if_egress_ext"
+
+ resource = "/junos/services/segment-routing/interface/egress/usage/"
+ $log.debug "Processing sensor '#{sensor}' with resource '#{resource}'"
+
+ # At this point in the code, 'datas_sensors' has the following value:
+ =begin
+ {
+ "jnpr_sr_stats_per_if_egress_ext": {
+ "per_if_records": [
+ {
+ "if_name": "et-1/1/0.0",
+ "counter_name": "oc-3847",
+ "egress_stats": {
+ "packets": 2878057633,
+ "bytes": 1387223773994,
+ "packet_rate": 20001,
+ "byte_rate": 9640765
+ }
+ }
+ ]
+ }
+ }
+ =end
+ # Iterate over each record contained within the 'per_if_records' array ...
+ datas_sensors[sensor]['per_if_records'].each do |datas|
+
+ # Save all extracted sensor data in a list.
+ sensor_data = []
+
+ # Block to catch exceptions during sensor data parsing.
+ begin
+
+ # Add the device name to "sensor_data" for correlation purposes.
+ sensor_data.push({ 'device' => device_name })
+
+ # Each of the child elements under "per_if_records" is going to be either a "leaf" node (eg. Integer, String, Float, etc.)
+ # or a "branch" node (eg. Array or Hash), in which case these branch sections need an additional level of processing.
+ # For the leaf nodes, these values can be written directly to "sensor_data"
+
+ datas.each do |level_1_key, level_1_value|
+ # If the node currently being processed is a "branch node" (ie. it has child nodes)
+ if level_1_value.is_a?(Hash) || level_1_value.is_a?(Array)
+
+ # From the proto file, we know that the level_1 branch nodes are all Hash values, so we can ignore the conditional
+ # below testing for an array
+ if level_1_value.is_a?(Array)
+ # Do nothing, as per reasons cited above.
+ # If the branch node is not an Array, then we can simply write the key/value pairs straight to "sensor_data".
+ else
+ level_1_value.each do |level_2_key, level_2_value|
+ ## For debug only ...
+ #$log.debug "Value of 'level_2_key': '#{level_2_key}'"
+ #$log.debug "Value of 'level_2_value': '#{level_2_value}'"
+
+ # Create local copy of 'sensor_data' variable.
+ local_sensor_data = sensor_data.dup
+ local_sensor_data = process_value(local_sensor_data, level_2_key, level_2_value, level_1_key)
+
+ record = build_record(output_format, local_sensor_data)
+ ## For debug only ...
+ #$log.debug "Value of 'local_sensor_data': '#{local_sensor_data}'"
+ yield gpb_time, record
+ end
+ end
+
+ # If the node currently being processed is a "leaf node" (ie. it has NO child nodes)
+ else
+
+ ## For debug only ...
+ #$log.debug "Value of 'level_1_key': '#{level_1_key}'"
+ #$log.debug "Value of 'level_1_value': '#{level_1_value}'"
+
+ if level_1_key == "if_name"
+ sensor_data.push({ 'interface' => level_1_value })
+ else
+ # Create local copy of 'sensor_data' variable.
+ local_sensor_data = sensor_data.dup
+ local_sensor_data = process_value(local_sensor_data, level_1_key, level_1_value, '')
+
+ record = build_record(output_format, local_sensor_data)
+ ## For debug only ...
+ #$log.debug "Value of 'local_sensor_data': '#{local_sensor_data}'"
+ #$log.debug "Value of 'record': '#{record}'"
+ yield gpb_time, record
+ end
+ end
+ end
+
+ rescue => e
+ $log.warn "Unable to parse '" + sensor + "' sensor, Error during processing: #{$!}"
+ $log.debug "Unable to parse '" + sensor + "' sensor, Data Dump: " + datas.inspect.to_s
+ end
+ end
+
+
+
+
+ ##########################################################################
+ ## SENSOR: /junos/services/segment-routing/interface/ingress/usage/ ##
+ ##########################################################################
+ elsif sensor == "jnpr_sr_stats_per_if_ingress_ext"
+
+ resource = "/junos/services/segment-routing/interface/ingress/usage/"
+ $log.debug "Processing sensor '#{sensor}' with resource '#{resource}'"
+
+ # At this point in the code, 'datas_sensors' has the following value:
+ =begin
+ {
+ "jnpr_sr_stats_per_if_ingress_ext": {
+ "per_if_records": [
+ {
+ "if_name": "xe-1/0/3:0.100",
+ "ingress_stats": {
+ "packets": 0,
+ "bytes": 0,
+ "packet_rate": 0,
+ "byte_rate": 0
+ }
+ },
+ ...
+ {
+ "if_name": "et-1/1/0.0",
+ "ingress_stats": {
+ "packets": 0,
+ "bytes": 0,
+ "packet_rate": 0,
+ "byte_rate": 0
+ }
+ }
+ ]
+ }
+ }
+ =end
+ # Iterate over each record contained within the 'per_if_records' array ...
+ datas_sensors[sensor]['per_if_records'].each do |datas|
+
+ # Save all extracted sensor data in a list.
+ sensor_data = []
+
+ # Block to catch exceptions during sensor data parsing.
+ begin
+
+ # Add the device name to "sensor_data" for correlation purposes.
+ sensor_data.push({ 'device' => device_name })
+
+ # Each of the child elements under "per_if_records" is going to be either a "leaf" node (eg. Integer, String, Float, etc.)
+ # or a "branch" node (eg. Array or Hash), in which case these branch sections need an additional level of processing.
+ # For the leaf nodes, these values can be written directly to "sensor_data"
+
+ datas.each do |level_1_key, level_1_value|
+ # If the node currently being processed is a "branch node" (ie. it has child nodes)
+ if level_1_value.is_a?(Hash) || level_1_value.is_a?(Array)
+
+ # From the proto file, we know that the level_1 branch nodes are all Hash values, so we can ignore the conditional
+ # below testing for an array
+ if level_1_value.is_a?(Array)
+ # Do nothing, as per reasons cited above.
+ # If the branch node is not an Array, then we can simply write the key/value pairs straight to "sensor_data".
+ else
+ level_1_value.each do |level_2_key, level_2_value|
+ ## For debug only ...
+ #$log.debug "Value of 'level_2_key': '#{level_2_key}'"
+ #$log.debug "Value of 'level_2_value': '#{level_2_value}'"
+
+ # Create local copy of 'sensor_data' variable.
+ local_sensor_data = sensor_data.dup
+ local_sensor_data = process_value(local_sensor_data, level_2_key, level_2_value, level_1_key)
+
+ record = build_record(output_format, local_sensor_data)
+ ## For debug only ...
+ #$log.debug "Value of 'local_sensor_data': '#{local_sensor_data}'"
+ yield gpb_time, record
+ end
+ end
+
+ # If the node currently being processed is a "leaf node" (ie. it has NO child nodes)
+ else
+
+ ## For debug only ...
+ #$log.debug "Value of 'level_1_key': '#{level_1_key}'"
+ #$log.debug "Value of 'level_1_value': '#{level_1_value}'"
+
+ if level_1_key == "if_name"
+ sensor_data.push({ 'interface' => level_1_value })
+ else
+ # Create local copy of 'sensor_data' variable.
+ local_sensor_data = sensor_data.dup
+ local_sensor_data = process_value(local_sensor_data, level_1_key, level_1_value, '')
+
+ record = build_record(output_format, local_sensor_data)
+ ## For debug only ...
+ #$log.debug "Value of 'local_sensor_data': '#{local_sensor_data}'"
+ #$log.debug "Value of 'record': '#{record}'"
+ yield gpb_time, record
+ end
+ end
+ end
+
+ rescue => e
+ $log.warn "Unable to parse '" + sensor + "' sensor, Error during processing: #{$!}"
+ $log.debug "Unable to parse '" + sensor + "' sensor, Data Dump: " + datas.inspect.to_s
+ end
+ end
+
+
+
+
+ #############################################################
+ ## SENSOR: /junos/services/segment-routing/sid/usage/ ##
+ #############################################################
+ elsif sensor == "jnpr_sr_stats_per_sid_ext"
+
+ resource = "/junos/services/segment-routing/sid/usage/"
+ $log.debug "Processing sensor '#{sensor}' with resource '#{resource}'"
+
+ # At this point in the code, 'datas_sensors' has the following value:
+ =begin
+ {
+ "jnpr_sr_stats_per_sid_ext": {
+ "sid_stats": [
+ {
+ "sid_identifier": "25",
+ "instance_identifier": 0,
+ "counter_name": "oc-4",
+ "ingress_stats": {
+ "packets": 0,
+ "bytes": 0,
+ "packet_rate": 0,
+ "byte_rate": 0
+ }
+ },
+ {
+ "sid_identifier": "16896",
+ "instance_identifier": 0,
+ "counter_name": "oc-5",
+ "ingress_stats": {
+ "packets": 0,
+ "bytes": 0,
+ "packet_rate": 0,
+ "byte_rate": 0
+ }
+ },
+ ...
+ {
+ "sid_identifier": "18957",
+ "instance_identifier": 0,
+ "counter_name": "oc-181",
+ "ingress_stats": {
+ "packets": 0,
+ "bytes": 0,
+ "packet_rate": 0,
+ "byte_rate": 0
+ }
+ },
+ {
+ "sid_identifier": "19213",
+ "instance_identifier": 0,
+ "counter_name": "oc-18"
+ }
+ ]
+ }
+ }
+ =end
+ # Iterate over each record contained within the 'sid_stats' array ...
+ datas_sensors[sensor]['sid_stats'].each do |datas|
+
+ # Save all extracted sensor data in a list.
+ sensor_data = []
+
+ # Block to catch exceptions during sensor data parsing.
+ begin
+
+ # Add the device name to "sensor_data" for correlation purposes.
+ sensor_data.push({ 'device' => device_name })
+
+ # Each of the child elements under "sid_stats" is going to be either a "leaf" node (eg. Integer, String, Float, etc.)
+ # or a "branch" node (eg. Array or Hash), in which case these branch sections need an additional level of processing.
+ # For the leaf nodes, these values can be written directly to "sensor_data"
+
+ datas.each do |level_1_key, level_1_value|
+ # If the node currently being processed is a "branch node" (ie. it has child nodes)
+ if level_1_value.is_a?(Hash) || level_1_value.is_a?(Array)
+
+ # From the proto file, we know that the level_1 branch nodes are all Hash values, so we can ignore the conditional
+ # below testing for an array
+ if level_1_value.is_a?(Array)
+ # Do nothing, as per reasons cited above.
+ # If the branch node is not an Array, then we can simply write the key/value pairs straight to "sensor_data".
+ else
+ level_1_value.each do |level_2_key, level_2_value|
+ ## For debug only ...
+ #$log.debug "Value of 'level_2_key': '#{level_2_key}'"
+ #$log.debug "Value of 'level_2_value': '#{level_2_value}'"
+
+ # Create local copy of 'sensor_data' variable.
+ local_sensor_data = sensor_data.dup
+ local_sensor_data = process_value(local_sensor_data, level_2_key, level_2_value, level_1_key)
+
+ record = build_record(output_format, local_sensor_data)
+ ## For debug only ...
+ #$log.debug "Value of 'local_sensor_data': '#{local_sensor_data}'"
+ yield gpb_time, record
+ end
+ end
+
+ # If the node currently being processed is a "leaf node" (ie. it has NO child nodes)
+ else
+
+ ## For debug only ...
+ #$log.debug "Value of 'level_1_key': '#{level_1_key}'"
+ #$log.debug "Value of 'level_1_value': '#{level_1_value}'"
+
+ if level_1_key == "sid_identifier"
+ sensor_data.push({ 'sid_identifier' => level_1_value })
+ else
+ # Create local copy of 'sensor_data' variable.
+ local_sensor_data = sensor_data.dup
+ local_sensor_data = process_value(local_sensor_data, level_1_key, level_1_value, '')
+
+ record = build_record(output_format, local_sensor_data)
+ ## For debug only ...
+ #$log.debug "Value of 'local_sensor_data': '#{local_sensor_data}'"
+ #$log.debug "Value of 'record': '#{record}'"
+ yield gpb_time, record
+ end
+ end
+ end
+
+ rescue => e
+ $log.warn "Unable to parse '" + sensor + "' sensor, Error during processing: #{$!}"
+ $log.debug "Unable to parse '" + sensor + "' sensor, Data Dump: " + datas.inspect.to_s
+ end
+ end
+
+
+
+
  ####################################################
  ## SENSOR: /junos/system/linecard/cpu/memory/ ##
  ####################################################
@@ -308,13 +648,6 @@ module Fluent
  if level_1_key == "name"
  sensor_data.push({ 'cpu_mem_partition_name' => level_1_value })
  else
- # By default, InfluxDB assigns the type of a field based on the type of the first value inserted.
- # So, in the "value" field, if an Integer is inserted, then the "value" field will only accept Integer
- # values hereon after ... so, a String value insertion will result in an error.
- # To alleviate this, we will have "value" as the default field for Integers, so as not to break existing code.
- # We will add additional "value_string", "value_float", fields to support different value types. This way,
- # we can persist all the various telemetry sensor parameters in InfluxDB, not just the Integer values.
-
  # Create local copy of 'sensor_data' variable.
  local_sensor_data = sensor_data.dup
  local_sensor_data = process_value(local_sensor_data, level_1_key, level_1_value, '')
@@ -696,13 +1029,6 @@ module Fluent
  elsif level_1_key == "init_time"
  # do nothing.
  else
- # By default, InfluxDB assigns the type of a field based on the type of the first value inserted.
- # So, in the "value" field, if an Integer is inserted, then the "value" field will only accept Integer
- # values hereon after ... so, a String value insertion will result in an error.
- # To alleviate this, we will have "value" as the default field for Integers, so as not to break existing code.
- # We will add additional "value_string", "value_float", fields to support different value types. This way,
- # we can persist all the various telemetry sensor parameters in InfluxDB, not just the Integer values.
-
  # Create local copy of 'sensor_data' variable.
  local_sensor_data = sensor_data.dup
  local_sensor_data = process_value(local_sensor_data, level_1_key, level_1_value, '')
@@ -871,13 +1197,6 @@ module Fluent
  elsif level_1_key == "init_time"
  # do nothing.
  else
- # By default, InfluxDB assigns the type of a field based on the type of the first value inserted.
- # So, in the "value" field, if an Integer is inserted, then the "value" field will only accept Integer
- # values hereon after ... so, a String value insertion will result in an error.
- # To alleviate this, we will have "value" as the default field for Integers, so as not to break existing code.
- # We will add additional "value_string", "value_float", fields to support different value types. This way,
- # we can persist all the various telemetry sensor parameters in InfluxDB, not just the Integer values.
-
  # Create local copy of 'sensor_data' variable.
  local_sensor_data = sensor_data.dup
  local_sensor_data = process_value(local_sensor_data, level_1_key, level_1_value, '')
@@ -1068,13 +1387,6 @@ module Fluent
  elsif level_1_key == "init_time"
  # do nothing.
  else
- # By default, InfluxDB assigns the type of a field based on the type of the first value inserted.
- # So, in the "value" field, if an Integer is inserted, then the "value" field will only accept Integer
- # values hereon after ... so, a String value insertion will result in an error.
- # To alleviate this, we will have "value" as the default field for Integers, so as not to break existing code.
- # We will add additional "value_string", "value_float", fields to support different value types. This way,
- # we can persist all the various telemetry sensor parameters in InfluxDB, not just the Integer values.
-
  # Create local copy of 'sensor_data' variable.
  local_sensor_data = sensor_data.dup
  local_sensor_data = process_value(local_sensor_data, level_1_key, level_1_value, '')
@@ -1105,6 +1417,13 @@ module Fluent

  def process_value(local_sensor_data, key, value, parent_key)

+ # By default, InfluxDB assigns the type of a field based on the type of the first value inserted.
+ # So, in the "value" field, if an Integer is inserted, then the "value" field will only accept Integer
+ # values hereon after ... so, a String value insertion will result in an error.
+ # To alleviate this, we will have "value" as the default field for Integers, so as not to break existing code.
+ # We will add additional "value_string", "value_float", fields to support different value types. This way,
+ # we can persist all the various telemetry sensor parameters in InfluxDB, not just the Integer values.
+
  if value.is_a?(Integer)
  if parent_key == ''
  local_sensor_data.push({ 'type' => key })
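
The comment above describes the intended type handling, but the hunk only shows the start of the Integer branch. Below is a minimal sketch of the routing the comment describes; the 'value', 'value_string' and 'value_float' field names come from the comment itself, while composing the 'type' name from parent_key and key is an assumption, not the gem's exact implementation.

# Sketch only -- illustrates the type-based routing described in the comment above.
def process_value_sketch(local_sensor_data, key, value, parent_key)
  # Metric name goes into 'type'; nested stats are prefixed with their parent key (assumption).
  type_name = parent_key == '' ? key : "#{parent_key}.#{key}"
  local_sensor_data.push({ 'type' => type_name })

  case value
  when Integer
    # Integers keep using the original 'value' field so existing InfluxDB series are not broken.
    local_sensor_data.push({ 'value' => value })
  when Float
    local_sensor_data.push({ 'value_float' => value })
  when String
    local_sensor_data.push({ 'value_string' => value })
  end

  local_sensor_data
end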
lib/sr_stats_per_if_egress.pb.rb ADDED
@@ -0,0 +1,52 @@
+ # encoding: utf-8
+
+ ##
+ # This file is auto-generated. DO NOT EDIT!
+ #
+ require 'protobuf'
+
+
+ ##
+ # Imports
+ #
+ require 'telemetry_top.pb'
+
+
+ ##
+ # Message Classes
+ #
+ class SrStatsPerIfEgress < ::Protobuf::Message; end
+ class SegmentRoutingInterfaceRecord < ::Protobuf::Message; end
+ class SegmentRoutingIfStats < ::Protobuf::Message; end
+
+
+ ##
+ # Message Fields
+ #
+ class SrStatsPerIfEgress
+ repeated ::SegmentRoutingInterfaceRecord, :per_if_records, 1
+ end
+
+ class SegmentRoutingInterfaceRecord
+ required :string, :if_name, 1, :".telemetry_options" => { :is_key => true }
+ optional :string, :parent_ae_name, 2, :".telemetry_options" => { :is_key => true }
+ optional :string, :counter_name, 3, :".telemetry_options" => { :is_key => true }
+ optional ::SegmentRoutingIfStats, :ingress_stats, 4
+ optional ::SegmentRoutingIfStats, :egress_stats, 5
+ end
+
+ class SegmentRoutingIfStats
+ optional :uint64, :packets, 1, :".telemetry_options" => { :is_counter => true }
+ optional :uint64, :bytes, 2, :".telemetry_options" => { :is_counter => true }
+ optional :uint64, :packet_rate, 3, :".telemetry_options" => { :is_gauge => true }
+ optional :uint64, :byte_rate, 4, :".telemetry_options" => { :is_gauge => true }
+ end
+
+
+ ##
+ # Extended Message Fields
+ #
+ class ::JuniperNetworksSensors < ::Protobuf::Message
+ optional ::SrStatsPerIfEgress, :".jnpr_sr_stats_per_if_egress_ext", 17, :extension => true
+ end
+
lib/sr_stats_per_if_ingress.pb.rb ADDED
@@ -0,0 +1,52 @@
+ # encoding: utf-8
+
+ ##
+ # This file is auto-generated. DO NOT EDIT!
+ #
+ require 'protobuf'
+
+
+ ##
+ # Imports
+ #
+ require 'telemetry_top.pb'
+
+
+ ##
+ # Message Classes
+ #
+ class SrStatsPerIfIngress < ::Protobuf::Message; end
+ class SegmentRoutingIngIfRecord < ::Protobuf::Message; end
+ class SegmentRoutingIngIfStats < ::Protobuf::Message; end
+
+
+ ##
+ # Message Fields
+ #
+ class SrStatsPerIfIngress
+ repeated ::SegmentRoutingIngIfRecord, :per_if_records, 1
+ end
+
+ class SegmentRoutingIngIfRecord
+ required :string, :if_name, 1, :".telemetry_options" => { :is_key => true }
+ optional :string, :parent_ae_name, 2, :".telemetry_options" => { :is_key => true }
+ optional :string, :counter_name, 3, :".telemetry_options" => { :is_key => true }
+ optional ::SegmentRoutingIngIfStats, :ingress_stats, 4
+ optional ::SegmentRoutingIngIfStats, :egress_stats, 5
+ end
+
+ class SegmentRoutingIngIfStats
+ optional :uint64, :packets, 1, :".telemetry_options" => { :is_counter => true }
+ optional :uint64, :bytes, 2, :".telemetry_options" => { :is_counter => true }
+ optional :uint64, :packet_rate, 3, :".telemetry_options" => { :is_gauge => true }
+ optional :uint64, :byte_rate, 4, :".telemetry_options" => { :is_gauge => true }
+ end
+
+
+ ##
+ # Extended Message Fields
+ #
+ class ::JuniperNetworksSensors < ::Protobuf::Message
+ optional ::SrStatsPerIfIngress, :".jnpr_sr_stats_per_if_ingress_ext", 19, :extension => true
+ end
+
lib/sr_stats_per_sid.pb.rb ADDED
@@ -0,0 +1,52 @@
+ # encoding: utf-8
+
+ ##
+ # This file is auto-generated. DO NOT EDIT!
+ #
+ require 'protobuf'
+
+
+ ##
+ # Imports
+ #
+ require 'telemetry_top.pb'
+
+
+ ##
+ # Message Classes
+ #
+ class SrStatsPerSid < ::Protobuf::Message; end
+ class SegmentRoutingRecord < ::Protobuf::Message; end
+ class SegmentRoutingStats < ::Protobuf::Message; end
+
+
+ ##
+ # Message Fields
+ #
+ class SrStatsPerSid
+ repeated ::SegmentRoutingRecord, :sid_stats, 1
+ end
+
+ class SegmentRoutingRecord
+ required :string, :sid_identifier, 1, :".telemetry_options" => { :is_key => true }
+ optional :uint32, :instance_identifier, 2, :".telemetry_options" => { :is_key => true }
+ required :string, :counter_name, 3, :".telemetry_options" => { :is_key => true }
+ optional ::SegmentRoutingStats, :ingress_stats, 4
+ optional ::SegmentRoutingStats, :egress_stats, 5
+ end
+
+ class SegmentRoutingStats
+ optional :uint64, :packets, 1, :".telemetry_options" => { :is_counter => true }
+ optional :uint64, :bytes, 2, :".telemetry_options" => { :is_counter => true }
+ optional :uint64, :packet_rate, 3, :".telemetry_options" => { :is_gauge => true }
+ optional :uint64, :byte_rate, 4, :".telemetry_options" => { :is_gauge => true }
+ end
+
+
+ ##
+ # Extended Message Fields
+ #
+ class ::JuniperNetworksSensors < ::Protobuf::Message
+ optional ::SrStatsPerSid, :".jnpr_sr_stats_per_sid_ext", 16, :extension => true
+ end
+
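
Taken together, the three generated files register the new segment-routing extensions (field numbers 17, 19 and 16) on JuniperNetworksSensors, which is what lets the parser reach them under "enterprise" / "juniperNetworks" as shown in the hunks above. A rough usage sketch, assuming the TelemetryStream top-level message from telemetry_top.pb and a hypothetical capture file (not part of the gem):

# Sketch only: decode a captured JTI payload and read the new per-SID stats.
require 'json'
require 'protobuf'
require 'telemetry_top.pb'
require 'sr_stats_per_sid.pb'

payload = File.binread('jti_sample.bin')   # hypothetical raw UDP payload
jti_msg = TelemetryStream.decode(payload)
jti_msg_json = JSON.parse(jti_msg.to_json)

sid_stats = jti_msg_json.dig('enterprise', 'juniperNetworks',
                             'jnpr_sr_stats_per_sid_ext', 'sid_stats') || []
sid_stats.each do |rec|
  puts "#{rec['sid_identifier']} #{rec['counter_name']}: #{rec.dig('ingress_stats', 'packets')} packets"
end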
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-juniper-telemetry_tech-mocha
  version: !ruby/object:Gem::Version
- version: 0.4.0
+ version: 0.4.2
  platform: ruby
  authors:
  - Tech Mocha
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2017-11-23 00:00:00.000000000 Z
+ date: 2017-12-20 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: fluentd
@@ -80,6 +80,9 @@ files:
  - lib/port.pb.rb
  - lib/port_exp.pb.rb
  - lib/qmon.pb.rb
+ - lib/sr_stats_per_if_egress.pb.rb
+ - lib/sr_stats_per_if_ingress.pb.rb
+ - lib/sr_stats_per_sid.pb.rb
  - lib/telemetry_top.pb.rb
  homepage: https://github.com/tech-mocha/fluentd-plugin-juniper-telemetry
  licenses: