pghero 2.5.0 → 2.5.1

Sign up to get free protection for your applications and to get access to all of the features.

Potentially problematic release.


This version of pghero might be problematic. Click here for more details.

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 252b8c1bb7580d67b149ef865f97b99a89fbd9aecb640a92f5ebf8a532d7c10b
4
- data.tar.gz: 87a9df921265875867cb9d9b0dcd92fa6d6855075818129a8b5bf47337277354
3
+ metadata.gz: 8e147c844a9c109baa794e9a3950e747539e2b15f19b781e9822edaed5f216ff
4
+ data.tar.gz: e18d81b6876148607514621d96b7b84f9436081f7eb4660008cbf3148578bbe5
5
5
  SHA512:
6
- metadata.gz: 9b1f75f5ef19ee10da7ee815aca9ee7933fa89ae78046e495417f7c863f383172fba28bfb00605ac8810a2bb5a9895ae478a3f182ba36204080151f84ad9e2a6
7
- data.tar.gz: 6fc630bd5f46fbecb5b6194d58726397c8f28a812e7ceabe12784ed6892bd7f7609646cba22e57c34ab817be40b37e251384f5ba23a32e82199db7620f4dc009
6
+ metadata.gz: 1eea90a86822be5ee51a1b0b34e5e86297909a6bb41d4cf6589162285b8d2414b63c119cfba7d4df5b6f78d0b755ff517b47420c1ddd27a0cadcc51f0305f465
7
+ data.tar.gz: afc7bb09d979dada5636a8056ae1d692b88d8fc0cfda0af8d81f02a4a413499027953c4506c7a627da27c5c9b9f87aca1c030d76a0caa758773c6483c4ac171a
@@ -1,3 +1,9 @@
1
+ ## 2.5.1 (2020-06-23)
2
+
3
+ - Added support for `google-cloud-monitoring` >= 1
4
+ - Added support for `google-cloud-monitoring-v3`
5
+ - Fixed system stats not showing up in Rails 6 with environment variables
6
+
1
7
  ## 2.5.0 (2020-05-24)
2
8
 
3
9
  - Added system stats for Google Cloud SQL and Azure Database
@@ -32,14 +32,14 @@ databases:
32
32
 
33
33
  # Basic authentication
34
34
  # username: admin
35
- # password: secret
35
+ # password: <%%= ENV["PGHERO_PASSWORD"] %>
36
36
 
37
37
  # Stats database URL (defaults to app database)
38
38
  # stats_database_url: <%%= ENV["PGHERO_STATS_DATABASE_URL"] %>
39
39
 
40
40
  # AWS configuration (defaults to app AWS config)
41
- # aws_access_key_id: ...
42
- # aws_secret_access_key: ...
41
+ # aws_access_key_id: <%%= ENV["AWS_ACCESS_KEY_ID"] %>
42
+ # aws_secret_access_key: <%%= ENV["AWS_SECRET_ACCESS_KEY"] %>
43
43
  # aws_region: us-east-1
44
44
 
45
45
  # Filter data from queries (experimental)
@@ -119,11 +119,16 @@ module PgHero
119
119
 
120
120
  if databases.empty?
121
121
  databases["primary"] = {
122
- "url" => ENV["PGHERO_DATABASE_URL"] || ActiveRecord::Base.connection_config,
122
+ "url" => ENV["PGHERO_DATABASE_URL"] || ActiveRecord::Base.connection_config
123
+ }
124
+ end
125
+
126
+ if databases.size == 1
127
+ databases.values.first.merge!(
123
128
  "db_instance_identifier" => ENV["PGHERO_DB_INSTANCE_IDENTIFIER"],
124
129
  "gcp_database_id" => ENV["PGHERO_GCP_DATABASE_ID"],
125
130
  "azure_resource_id" => ENV["PGHERO_AZURE_RESOURCE_ID"]
126
- }
131
+ )
127
132
  end
128
133
 
129
134
  {
@@ -5,7 +5,7 @@ module PgHero
5
5
  !system_stats_provider.nil?
6
6
  end
7
7
 
8
- # TODO require AWS 2+ automatically
8
+ # TODO remove defined checks in 3.0
9
9
  def system_stats_provider
10
10
  if aws_db_instance_identifier && (defined?(Aws) || defined?(AWS))
11
11
  :aws
@@ -133,7 +133,7 @@ module PgHero
133
133
  private
134
134
 
135
135
  def gcp_stats(metric_name, duration: nil, period: nil, offset: nil, series: false)
136
- require "google/cloud/monitoring"
136
+ require "google/cloud/monitoring/v3"
137
137
 
138
138
  # TODO DRY with RDS stats
139
139
  duration = (duration || 1.hour).to_i
@@ -142,30 +142,63 @@ module PgHero
142
142
  end_time = Time.at(((Time.now - offset).to_f / period).ceil * period)
143
143
  start_time = end_time - duration
144
144
 
145
- client = Google::Cloud::Monitoring::Metric.new
146
-
147
- interval = Google::Monitoring::V3::TimeInterval.new
148
- interval.end_time = Google::Protobuf::Timestamp.new(seconds: end_time.to_i)
149
- # subtract period to make sure we get first data point
150
- interval.start_time = Google::Protobuf::Timestamp.new(seconds: (start_time - period).to_i)
151
-
152
- aggregation = Google::Monitoring::V3::Aggregation.new
153
- # may be better to use ALIGN_NEXT_OLDER for space stats to show most recent data point
154
- # stick with average for now to match AWS
155
- aggregation.per_series_aligner = Google::Monitoring::V3::Aggregation::Aligner::ALIGN_MEAN
156
- aggregation.alignment_period = period
157
-
158
145
  # validate input since we need to interpolate below
159
146
  raise Error, "Invalid metric name" unless metric_name =~ /\A[a-z\/_]+\z/i
160
147
  raise Error, "Invalid database id" unless gcp_database_id =~ /\A[a-z\-:]+\z/i
161
148
 
162
- results = client.list_time_series(
163
- "projects/#{gcp_database_id.split(":").first}",
164
- "metric.type = \"cloudsql.googleapis.com/database/#{metric_name}\" AND resource.label.database_id = \"#{gcp_database_id}\"",
165
- interval,
166
- Google::Monitoring::V3::ListTimeSeriesRequest::TimeSeriesView::FULL,
167
- aggregation: aggregation
168
- )
149
+ # we handle three situations:
150
+ # 1. google-cloud-monitoring-v3
151
+ # 2. google-cloud-monitoring >= 1
152
+ # 3. google-cloud-monitoring < 1
153
+
154
+ # for situations 1 and 2
155
+ # Google::Cloud::Monitoring.metric_service is documented
156
+ # but doesn't work for situation 1
157
+ if defined?(Google::Cloud::Monitoring::V3::MetricService::Client)
158
+ client = Google::Cloud::Monitoring::V3::MetricService::Client.new
159
+
160
+ interval = Google::Cloud::Monitoring::V3::TimeInterval.new
161
+ interval.end_time = Google::Protobuf::Timestamp.new(seconds: end_time.to_i)
162
+ # subtract period to make sure we get first data point
163
+ interval.start_time = Google::Protobuf::Timestamp.new(seconds: (start_time - period).to_i)
164
+
165
+ aggregation = Google::Cloud::Monitoring::V3::Aggregation.new
166
+ # may be better to use ALIGN_NEXT_OLDER for space stats to show most recent data point
167
+ # stick with average for now to match AWS
168
+ aggregation.per_series_aligner = Google::Cloud::Monitoring::V3::Aggregation::Aligner::ALIGN_MEAN
169
+ aggregation.alignment_period = period
170
+
171
+ results = client.list_time_series({
172
+ name: "projects/#{gcp_database_id.split(":").first}",
173
+ filter: "metric.type = \"cloudsql.googleapis.com/database/#{metric_name}\" AND resource.label.database_id = \"#{gcp_database_id}\"",
174
+ interval: interval,
175
+ view: Google::Cloud::Monitoring::V3::ListTimeSeriesRequest::TimeSeriesView::FULL,
176
+ aggregation: aggregation
177
+ })
178
+ else
179
+ require "google/cloud/monitoring"
180
+
181
+ client = Google::Cloud::Monitoring::Metric.new
182
+
183
+ interval = Google::Monitoring::V3::TimeInterval.new
184
+ interval.end_time = Google::Protobuf::Timestamp.new(seconds: end_time.to_i)
185
+ # subtract period to make sure we get first data point
186
+ interval.start_time = Google::Protobuf::Timestamp.new(seconds: (start_time - period).to_i)
187
+
188
+ aggregation = Google::Monitoring::V3::Aggregation.new
189
+ # may be better to use ALIGN_NEXT_OLDER for space stats to show most recent data point
190
+ # stick with average for now to match AWS
191
+ aggregation.per_series_aligner = Google::Monitoring::V3::Aggregation::Aligner::ALIGN_MEAN
192
+ aggregation.alignment_period = period
193
+
194
+ results = client.list_time_series(
195
+ "projects/#{gcp_database_id.split(":").first}",
196
+ "metric.type = \"cloudsql.googleapis.com/database/#{metric_name}\" AND resource.label.database_id = \"#{gcp_database_id}\"",
197
+ interval,
198
+ Google::Monitoring::V3::ListTimeSeriesRequest::TimeSeriesView::FULL,
199
+ aggregation: aggregation
200
+ )
201
+ end
169
202
 
170
203
  data = {}
171
204
  result = results.first
@@ -1,3 +1,3 @@
1
1
  module PgHero
2
- VERSION = "2.5.0"
2
+ VERSION = "2.5.1"
3
3
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: pghero
3
3
  version: !ruby/object:Gem::Version
4
- version: 2.5.0
4
+ version: 2.5.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Andrew Kane
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2020-05-24 00:00:00.000000000 Z
11
+ date: 2020-06-23 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: activerecord