riemann-tools 1.1.1 → 1.2.0

Files changed (99)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/ci.yml +2 -0
  3. data/.gitignore +2 -0
  4. data/.rubocop.yml +8 -0
  5. data/.ruby-version +1 -0
  6. data/CHANGELOG.md +25 -2
  7. data/Rakefile +10 -3
  8. data/bin/riemann-apache-status +1 -106
  9. data/bin/riemann-bench +2 -70
  10. data/bin/riemann-cloudant +1 -56
  11. data/bin/riemann-consul +1 -106
  12. data/bin/riemann-dir-files-count +1 -55
  13. data/bin/riemann-dir-space +1 -55
  14. data/bin/riemann-diskstats +1 -92
  15. data/bin/riemann-fd +2 -81
  16. data/bin/riemann-freeswitch +2 -119
  17. data/bin/riemann-haproxy +1 -58
  18. data/bin/riemann-health +0 -2
  19. data/bin/riemann-kvminstance +2 -22
  20. data/bin/riemann-memcached +1 -37
  21. data/bin/riemann-net +0 -2
  22. data/bin/riemann-nginx-status +1 -85
  23. data/bin/riemann-ntp +0 -2
  24. data/bin/riemann-portcheck +1 -44
  25. data/bin/riemann-proc +1 -108
  26. data/bin/riemann-varnish +1 -54
  27. data/bin/riemann-wrapper +75 -0
  28. data/bin/riemann-zookeeper +1 -39
  29. data/lib/riemann/tools/apache_status.rb +107 -0
  30. data/lib/riemann/tools/bench.rb +72 -0
  31. data/lib/riemann/tools/cloudant.rb +57 -0
  32. data/lib/riemann/tools/consul_health.rb +107 -0
  33. data/lib/riemann/tools/dir_files_count.rb +56 -0
  34. data/lib/riemann/tools/dir_space.rb +56 -0
  35. data/lib/riemann/tools/diskstats.rb +94 -0
  36. data/lib/riemann/tools/fd.rb +81 -0
  37. data/lib/riemann/tools/freeswitch.rb +119 -0
  38. data/lib/riemann/tools/haproxy.rb +59 -0
  39. data/lib/riemann/tools/health.rb +150 -19
  40. data/lib/riemann/tools/kvm.rb +23 -0
  41. data/lib/riemann/tools/memcached.rb +38 -0
  42. data/lib/riemann/tools/net.rb +2 -1
  43. data/lib/riemann/tools/nginx_status.rb +86 -0
  44. data/lib/riemann/tools/ntp.rb +1 -0
  45. data/lib/riemann/tools/portcheck.rb +45 -0
  46. data/lib/riemann/tools/proc.rb +109 -0
  47. data/lib/riemann/tools/riemann_client_wrapper.rb +43 -0
  48. data/lib/riemann/tools/uptime_parser.tab.rb +323 -0
  49. data/lib/riemann/tools/varnish.rb +55 -0
  50. data/lib/riemann/tools/version.rb +1 -1
  51. data/lib/riemann/tools/zookeeper.rb +40 -0
  52. data/lib/riemann/tools.rb +2 -20
  53. data/riemann-tools.gemspec +4 -1
  54. data/tools/riemann-aws/Rakefile +6 -9
  55. data/tools/riemann-aws/bin/riemann-aws-billing +2 -87
  56. data/tools/riemann-aws/bin/riemann-aws-rds-status +2 -62
  57. data/tools/riemann-aws/bin/riemann-aws-sqs-status +2 -44
  58. data/tools/riemann-aws/bin/riemann-aws-status +2 -77
  59. data/tools/riemann-aws/bin/riemann-elb-metrics +2 -162
  60. data/tools/riemann-aws/bin/riemann-s3-list +2 -81
  61. data/tools/riemann-aws/bin/riemann-s3-status +2 -96
  62. data/tools/riemann-aws/lib/riemann/tools/aws/billing.rb +87 -0
  63. data/tools/riemann-aws/lib/riemann/tools/aws/elb_metrics.rb +163 -0
  64. data/tools/riemann-aws/lib/riemann/tools/aws/rds_status.rb +63 -0
  65. data/tools/riemann-aws/lib/riemann/tools/aws/s3_list.rb +82 -0
  66. data/tools/riemann-aws/lib/riemann/tools/aws/s3_status.rb +97 -0
  67. data/tools/riemann-aws/lib/riemann/tools/aws/sqs_status.rb +45 -0
  68. data/tools/riemann-aws/lib/riemann/tools/aws/status.rb +74 -0
  69. data/tools/riemann-chronos/Rakefile +6 -9
  70. data/tools/riemann-chronos/bin/riemann-chronos +1 -154
  71. data/tools/riemann-chronos/lib/riemann/tools/chronos.rb +157 -0
  72. data/tools/riemann-docker/Rakefile +5 -8
  73. data/tools/riemann-docker/bin/riemann-docker +2 -200
  74. data/tools/riemann-docker/lib/riemann/tools/docker.rb +200 -0
  75. data/tools/riemann-elasticsearch/Rakefile +6 -9
  76. data/tools/riemann-elasticsearch/bin/riemann-elasticsearch +1 -167
  77. data/tools/riemann-elasticsearch/lib/riemann/tools/elasticsearch.rb +170 -0
  78. data/tools/riemann-marathon/Rakefile +6 -9
  79. data/tools/riemann-marathon/bin/riemann-marathon +1 -156
  80. data/tools/riemann-marathon/lib/riemann/tools/marathon.rb +159 -0
  81. data/tools/riemann-mesos/Rakefile +6 -9
  82. data/tools/riemann-mesos/bin/riemann-mesos +1 -139
  83. data/tools/riemann-mesos/lib/riemann/tools/mesos.rb +142 -0
  84. data/tools/riemann-munin/Rakefile +5 -8
  85. data/tools/riemann-munin/bin/riemann-munin +1 -36
  86. data/tools/riemann-munin/lib/riemann/tools/munin.rb +37 -0
  87. data/tools/riemann-rabbitmq/Rakefile +6 -9
  88. data/tools/riemann-rabbitmq/bin/riemann-rabbitmq +1 -266
  89. data/tools/riemann-rabbitmq/lib/riemann/tools/rabbitmq.rb +269 -0
  90. data/tools/riemann-riak/Rakefile +5 -8
  91. data/tools/riemann-riak/bin/riemann-riak +1 -316
  92. data/tools/riemann-riak/bin/riemann-riak-keys +0 -1
  93. data/tools/riemann-riak/bin/riemann-riak-ring +0 -1
  94. data/tools/riemann-riak/lib/riemann/tools/riak.rb +317 -0
  95. metadata +57 -10
  96. data/.travis.yml +0 -31
  97. data/tools/riemann-riak/riak_status/key_count.erl +0 -13
  98. data/tools/riemann-riak/riak_status/riak_status.rb +0 -152
  99. data/tools/riemann-riak/riak_status/ringready.erl +0 -9
data/tools/riemann-aws/lib/riemann/tools/aws/elb_metrics.rb
@@ -0,0 +1,163 @@
+ # frozen_string_literal: true
+
+ require 'riemann/tools'
+
+ module Riemann
+   module Tools
+     module Aws
+       class ElbMetrics
+         include Riemann::Tools
+         require 'fog/aws'
+         require 'time'
+
+         opt :fog_credentials_file, 'Fog credentials file', type: String
+         opt :fog_credential, 'Fog credentials to use', type: String
+         opt :access_key, 'AWS Access Key', type: String
+         opt :secret_key, 'AWS Secret Key', type: String
+         opt :region, 'AWS Region', type: String, default: 'eu-west-1'
+         opt :azs, 'List of AZs to aggregate against', type: :strings, default: ['all_az']
+         opt :elbs, 'List of ELBs to pull metrics from', type: :strings, required: true
+
+         def standard_metrics
+           # ELB metric types, from:
+           # http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/CW_Support_For_AWS.html#elb-metricscollected
+           {
+             'Latency' => {
+               'Unit' => 'Seconds',
+               'Statistics' => %w[Maximum Minimum Average],
+             },
+             'RequestCount' => {
+               'Unit' => 'Count',
+               'Statistics' => ['Sum'],
+             },
+             'HealthyHostCount' => {
+               'Units' => 'Count',
+               'Statistics' => %w[Minimum Maximum Average],
+             },
+             'UnHealthyHostCount' => {
+               'Units' => 'Count',
+               'Statistics' => %w[Minimum Maximum Average],
+             },
+             'HTTPCode_ELB_4XX' => {
+               'Units' => 'Count',
+               'Statistics' => ['Sum'],
+             },
+             'HTTPCode_ELB_5XX' => {
+               'Units' => 'Count',
+               'Statistics' => ['Sum'],
+             },
+             'HTTPCode_Backend_2XX' => {
+               'Units' => 'Count',
+               'Statistics' => ['Sum'],
+             },
+             'HTTPCode_Backend_3XX' => {
+               'Units' => 'Count',
+               'Statistics' => ['Sum'],
+             },
+             'HTTPCode_Backend_4XX' => {
+               'Units' => 'Count',
+               'Statistics' => ['Sum'],
+             },
+             'HTTPCode_Backend_5XX' => {
+               'Units' => 'Count',
+               'Statistics' => ['Sum'],
+             },
+           }
+         end
+
+         def base_metrics
+           # get last 60 seconds
+           start_time = (Time.now.utc - 60).iso8601
+           end_time = Time.now.utc.iso8601
+
+           # The base query that all metrics would get
+           {
+             'Namespace' => 'AWS/ELB',
+             'StartTime' => start_time,
+             'EndTime' => end_time,
+             'Period' => 60,
+           }
+         end
+
+         def tick
+           if options[:fog_credentials_file]
+             Fog.credentials_path = options[:fog_credentials_file]
+             Fog.credential = options[:fog_credential].to_sym
+             connection = Fog::AWS::CloudWatch.new
+           else
+             connection = if options[:access_key] && options[:secret_key]
+               Fog::AWS::CloudWatch.new({
+                 aws_access_key_id: options[:access_key],
+                 aws_secret_access_key: options[:secret_key],
+                 region: options[:region],
+               })
+             else
+               Fog::AWS::CloudWatch.new({
+                 use_iam_profile: true,
+                 region: options[:region],
+               })
+             end
+           end
+
+           options[:elbs].each do |lb|
+             metric_options = standard_metrics
+             metric_base_options = base_metrics
+
+             options[:azs].each do |az|
+               metric_options.keys.sort.each do |metric_type|
+                 merged_options = metric_base_options.merge(metric_options[metric_type])
+                 merged_options['MetricName'] = metric_type
+                 merged_options['Dimensions'] = if az == 'all_az'
+                   [{ 'Name' => 'LoadBalancerName', 'Value' => lb }]
+                 else
+                   [
+                     { 'Name' => 'LoadBalancerName', 'Value' => lb },
+                     { 'Name' => 'AvailabilityZone', 'Value' => az },
+                   ]
+                 end
+
+                 result = connection.get_metric_statistics(merged_options)
+
+                 # "If no response codes in the category 2XX-5XX range are sent to clients within
+                 # the given time period, values for these metrics will not be recorded in CloudWatch"
+                 # next if result.body["GetMetricStatisticsResult"]["Datapoints"].empty? && metric_type =~ /[2345]XX/
+                 #
+                 if result.body['GetMetricStatisticsResult']['Datapoints'].empty?
+                   standard_metrics[metric_type]['Statistics'].each do |stat_type|
+                     event = event(lb, az, metric_type, stat_type, 0.0)
+                     report(event)
+                   end
+                   next
+                 end
+
+                 # We should only ever have a single data point
+                 result.body['GetMetricStatisticsResult']['Datapoints'][0].keys.sort.each do |stat_type|
+                   next if stat_type == 'Unit'
+                   next if stat_type == 'Timestamp'
+
+                   unit = result.body['GetMetricStatisticsResult']['Datapoints'][0]['Unit']
+                   metric = result.body['GetMetricStatisticsResult']['Datapoints'][0][stat_type]
+                   event = event(lb, az, metric_type, stat_type, metric, unit)
+                   report(event)
+                 end
+               end
+             end
+           end
+         end
+
+         private
+
+         def event(lb, az, metric_type, stat_type, metric, unit = nil)
+           {
+             host: lb,
+             service: "elb.#{az}.#{metric_type}.#{stat_type}",
+             ttl: 60,
+             description: "#{lb} #{metric_type} #{stat_type} (#{unit})",
+             tags: ['elb_metrics'],
+             metric: metric,
+           }
+         end
+       end
+     end
+   end
+ end
data/tools/riemann-aws/lib/riemann/tools/aws/rds_status.rb
@@ -0,0 +1,63 @@
+ # frozen_string_literal: true
+
+ require 'riemann/tools'
+
+ module Riemann
+   module Tools
+     module Aws
+       class RdsStatus
+         include Riemann::Tools
+         require 'fog/aws'
+         require 'date'
+         require 'time'
+         require 'json'
+
+         opt :access_key, 'AWS access key', type: String
+         opt :secret_key, 'Secret access key', type: String
+         opt :region, 'AWS region', type: String, default: 'eu-west-1'
+         opt :dbinstance_identifier, 'DBInstanceIdentifier', type: String
+         def initialize
+           abort 'FATAL: specify a DB instance name, see --help for usage' unless opts[:dbinstance_identifier]
+           creds = if opts[:access_key] && opts[:secret_key]
+             {
+               aws_access_key_id: opts[:access_key],
+               aws_secret_access_key: opts[:secret_key],
+             }
+           else
+             { use_iam_profile: true }
+           end
+           creds['region'] = opts[:region]
+           @cloudwatch = Fog::AWS::CloudWatch.new(creds)
+         end
+
+         def tick
+           time = Time.new
+           %w[DatabaseConnections FreeableMemory FreeStorageSpace NetworkReceiveThroughput
+              NetworkTransmitThroughput ReadThroughput CPUUtilization].each do |metric|
+             result = @cloudwatch.get_metric_statistics(
+               'Namespace' => 'AWS/RDS',
+               'MetricName' => metric.to_s,
+               'Statistics' => 'Average',
+               'Dimensions' => [{ 'Name' => 'DBInstanceIdentifier', 'Value' => opts[:dbinstance_identifier].to_s }],
+               'StartTime' => (time - 120).to_time.iso8601,
+               'EndTime' => time.to_time.iso8601, 'Period' => 60,
+             )
+             metrics_result = result.data[:body]['GetMetricStatisticsResult']
+             next unless metrics_result['Datapoints'].length.positive?
+
+             datapoint = metrics_result['Datapoints'][0]
+             ev = {
+               metric: datapoint['Average'],
+               service: "#{opts[:dbinstance_identifier]}.#{metric} (#{datapoint['Unit']})",
+               description: JSON.dump(metrics_result),
+               state: 'ok',
+               ttl: 300,
+             }
+
+             report ev
+           end
+         end
+       end
+     end
+   end
+ end
data/tools/riemann-aws/lib/riemann/tools/aws/s3_list.rb
@@ -0,0 +1,82 @@
+ # frozen_string_literal: true
+
+ require 'riemann/tools'
+
+ module Riemann
+   module Tools
+     module Aws
+       class S3List
+         include Riemann::Tools
+         require 'fog/aws'
+         require 'time'
+
+         opt :fog_credentials_file, 'Fog credentials file', type: String
+         opt :fog_credential, 'Fog credentials to use', type: String
+         opt :access_key, 'AWS Access Key', type: String
+         opt :secret_key, 'AWS Secret Key', type: String
+         opt :region, 'AWS Region', type: String, default: 'eu-west-1'
+         opt :buckets, 'Buckets to pull metrics from, multi=true, can have a prefix like mybucket/prefix', type: String,
+             multi: true, required: true
+         opt :max_objects, 'Max number of objects to list before stopping to save bandwidth', default: -1
+
+         def tick
+           if options[:fog_credentials_file]
+             Fog.credentials_path = options[:fog_credentials_file]
+             Fog.credential = options[:fog_credential].to_sym
+             connection = Fog::Storage.new
+           else
+             connection = if options[:access_key] && options[:secret_key]
+               Fog::Storage.new({
+                 provider: 'AWS',
+                 aws_access_key_id: options[:access_key],
+                 aws_secret_access_key: options[:secret_key],
+                 region: options[:region],
+               })
+             else
+               Fog::Storage.new({
+                 provider: 'AWS',
+                 use_iam_profile: true,
+                 region: options[:region],
+               })
+             end
+           end
+
+           options[:buckets].each do |url|
+             split = url.split('/')
+             bucket = split[0]
+             prefix = ''
+             prefix = url[(split[0].length + 1)..] if split[1]
+             count = 0
+             connection.directories.get(bucket, prefix: prefix).files.map do |_file|
+               count += 1
+               break if options[:max_objects].positive? && count > options[:max_objects]
+             end
+             event = if options[:max_objects].positive? && count > options[:max_objects]
+               event(
+                 url, 'objectCount', count, "count was bigger than threshold #{options[:max_objects]}",
+                 'warning',
+               )
+             else
+               event(url, 'objectCount', count, "All objects counted, threshold=#{options[:max_objects]}", 'ok')
+             end
+             report(event)
+           end
+         end
+
+         private
+
+         def event(bucket, label, metric, description, severity)
+           {
+             host: "bucket_#{bucket}",
+             service: "s3.#{label}",
+             ttl: 300,
+             description: "#{bucket} #{description}",
+             tags: ['s3_metrics'],
+             metric: metric,
+             state: severity,
+           }
+         end
+       end
+     end
+   end
+ end
data/tools/riemann-aws/lib/riemann/tools/aws/s3_status.rb
@@ -0,0 +1,97 @@
+ # frozen_string_literal: true
+
+ require 'riemann/tools'
+
+ module Riemann
+   module Tools
+     module Aws
+       class S3Status
+         include Riemann::Tools
+         require 'fog/aws'
+         require 'time'
+
+         opt :fog_credentials_file, 'Fog credentials file', type: String
+         opt :fog_credential, 'Fog credentials to use', type: String
+         opt :access_key, 'AWS Access Key', type: String
+         opt :secret_key, 'AWS Secret Key', type: String
+         opt :region, 'AWS Region', type: String, default: 'eu-west-1'
+         opt :buckets, 'Buckets to pull metrics from, multi=true', type: String, multi: true, required: true
+         opt :statistic, 'Statistic to retrieve, multi=true, e.g. --statistic=Average --statistic=Maximum', type: String,
+             multi: true, required: true
+
+         def base_metrics
+           # get last 60 seconds
+           start_time = (Time.now.utc - 3600 * 24 * 1).iso8601
+           end_time = Time.now.utc.iso8601
+
+           # The base query that all metrics would get
+           {
+             'Namespace' => 'AWS/S3',
+             'StartTime' => start_time,
+             'EndTime' => end_time,
+             'Period' => 3600,
+             'MetricName' => 'NumberOfObjects',
+           }
+         end
+
+         def tick
+           if options[:fog_credentials_file]
+             Fog.credentials_path = options[:fog_credentials_file]
+             Fog.credential = options[:fog_credential].to_sym
+             connection = Fog::AWS::CloudWatch.new
+           else
+             connection = if options[:access_key] && options[:secret_key]
+               Fog::AWS::CloudWatch.new({
+                 aws_access_key_id: options[:access_key],
+                 aws_secret_access_key: options[:secret_key],
+                 region: options[:region],
+               })
+             else
+               Fog::AWS::CloudWatch.new({
+                 use_iam_profile: true,
+                 region: options[:region],
+               })
+             end
+           end
+
+           options[:statistic].each do |statistic|
+             options[:buckets].each do |bucket|
+               metric_base_options = base_metrics
+               metric_base_options['Statistics'] = statistic
+               metric_base_options['Dimensions'] = [
+                 { 'Name' => 'BucketName', 'Value' => bucket },
+                 { 'Name' => 'StorageType', 'Value' => 'AllStorageTypes' },
+               ]
+
+               result = connection.get_metric_statistics(metric_base_options)
+               next if result.body['GetMetricStatisticsResult']['Datapoints'].empty?
+
+               result.body['GetMetricStatisticsResult']['Datapoints'][0].keys.sort.each do |stat_type|
+                 next if stat_type == 'Unit'
+                 next if stat_type == 'Timestamp'
+
+                 unit = result.body['GetMetricStatisticsResult']['Datapoints'][0]['Unit']
+                 metric = result.body['GetMetricStatisticsResult']['Datapoints'][0][stat_type]
+                 event = event(bucket, result.body['GetMetricStatisticsResult']['Label'], stat_type, unit, metric)
+                 report(event)
+               end
+             end
+           end
+         end
+
+         private
+
+         def event(bucket, label, metric_type, stat_type, metric, unit = nil)
+           {
+             host: "bucket_#{bucket}",
+             service: "s3.#{label}.#{metric_type}.#{stat_type}",
+             ttl: 300,
+             description: "#{bucket} #{metric_type} #{stat_type} (#{unit})",
+             tags: ['s3_metrics'],
+             metric: metric,
+           }
+         end
+       end
+     end
+   end
+ end
data/tools/riemann-aws/lib/riemann/tools/aws/sqs_status.rb
@@ -0,0 +1,45 @@
+ # frozen_string_literal: true
+
+ require 'riemann/tools'
+
+ module Riemann
+   module Tools
+     module Aws
+       class SqsStatus
+         include Riemann::Tools
+         require 'fog/aws'
+
+         opt :access_key, 'AWS access key', type: String
+         opt :secret_key, 'Secret access key', type: String
+         opt :region, 'AWS region', type: String, default: 'us-east-1'
+         opt :queue, 'SQS Queue name', type: String
+         def initialize
+           creds = if opts.key?('access_key') && opts.key?('secret_key')
+             {
+               aws_access_key_id: opts[:access_key],
+               aws_secret_access_key: opts[:secret_key],
+             }
+           else
+             { use_iam_profile: true }
+           end
+           creds['region'] = opts[:region]
+           @sqs = Fog::AWS::SQS.new(creds)
+           response = @sqs.list_queues({ 'QueueNamePrefix' => opts[:queue] })
+           @queue_url = response[:body]['QueueUrls'].first
+         end
+
+         def tick
+           response = @sqs.get_queue_attributes(@queue_url, 'All')
+           %w[ApproximateNumberOfMessages ApproximateNumberOfMessagesNotVisible].each do |attr|
+             msg = {
+               metric: response[:body]['Attributes'][attr],
+               service: "#{opts[:queue]} #{attr}",
+               state: 'ok',
+             }
+             report msg
+           end
+         end
+       end
+     end
+   end
+ end
data/tools/riemann-aws/lib/riemann/tools/aws/status.rb
@@ -0,0 +1,74 @@
+ # frozen_string_literal: true
+
+ require 'riemann/tools'
+
+ module Riemann
+   module Tools
+     module Aws
+       class Status
+         include Riemann::Tools
+         require 'fog/aws'
+         require 'date'
+
+         opt :fog_credentials_file, 'Fog credentials file', type: String
+         opt :fog_credential, 'Fog credentials to use', type: String
+         opt :access_key, 'AWS access key', type: String
+         opt :secret_key, 'Secret access key', type: String
+         opt :region, 'AWS region', type: String, default: 'eu-west-1'
+
+         opt :retirement_critical, 'Number of days before retirement. Defaults to 2', default: 2
+         opt :event_warning, 'Number of days before event. Defaults to nil (i.e. when the event appears)', default: nil
+
+         def initialize
+           if options[:fog_credentials_file]
+             Fog.credentials_path = options[:fog_credentials_file]
+             Fog.credential = options[:fog_credential].to_sym
+             @compute = Fog::AWS::Compute.new
+           else
+             @compute = if options[:access_key] && options[:secret_key]
+               Fog::AWS::Compute.new({
+                 access_key_key_id: options[:access_key],
+                 secret_key_access_key: options[:secret_key],
+                 region: options[:region],
+               })
+             else
+               Fog::AWS::Compute.new({
+                 use_iam_profile: true,
+                 region: options[:region],
+               })
+             end
+           end
+         end
+
+         def tick
+           hosts = @compute.servers.select { |s| s.state == 'running' }
+
+           hosts.each do |host, host_status|
+             host_status['eventsSet'].each do |event|
+               before, _after = %w[notBefore notAfter].map { |k| Date.parse event[k].to_s if event[k] }
+
+               ev = {
+                 host: host,
+                 service: 'aws_instance_status',
+                 description: "#{event['code']}\n\nstart #{event['notBefore']}\nend #{event['notAfter']}\n\n#{event['description']}",
+                 state: 'ok',
+                 ttl: 300,
+               }
+
+               ev2 = if (event['code'] == 'instance-retirement') &&
+                        (Date.today >= before - opts[:retirement_critical])
+                 { state: 'critical' }
+               elsif opts[:event_warning] && (Date.today >= before - opts[:event_warning])
+                 { state: 'warning' }
+               else
+                 { state: 'warning' }
+               end
+
+               report ev.merge(ev2)
+             end
+           end
+         end
+       end
+     end
+   end
+ end
data/tools/riemann-chronos/Rakefile
@@ -11,10 +11,8 @@ ENV['COPYFILE_DISABLE'] = 'true'

  # Gemspec
  gemspec = Gem::Specification.new do |s|
-   s.rubyforge_project = 'riemann-chronos'
-
    s.name = 'riemann-chronos'
-   s.version = '0.1.1'
+   s.version = '0.1.3'
    s.author = 'Peter Ericson'
    s.email = 'peter.ericson@cba.com.au'
    s.homepage = 'https://github.com/riemann/riemann-tools'
@@ -22,15 +20,14 @@ gemspec = Gem::Specification.new do |s|
    s.summary = 'Submits Chronos stats to riemann.'
    s.license = 'MIT'

-   s.add_dependency 'riemann-tools', '>= 0.2.13'
-   s.add_dependency 'faraday', '>= 0.8.5'
-   s.add_dependency 'json'
+   s.add_runtime_dependency 'riemann-tools', '~> 1.0', '>= 1.1.1'
+   s.add_runtime_dependency 'faraday', '~> 2.3', '>= 2.3.0'
+   s.add_runtime_dependency 'json', '~> 2.6', '>=2.6.2'

-   s.files = FileList['bin/*', 'LICENSE', 'README.md'].to_a
+   s.files = FileList['bin/*', 'lib/**/*.rb', 'LICENSE', 'README.md'].to_a
    s.executables |= Dir.entries('bin/')
-   s.has_rdoc = false

-   s.required_ruby_version = '>= 1.8.7'
+   s.required_ruby_version = Gem::Requirement.new('>= 2.6.0')
  end

  Gem::PackageTask.new gemspec do |p|