fog-google 1.3.3 → 1.4.0

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (57)
  1. checksums.yaml +4 -4
  2. data/.gitignore +1 -0
  3. data/.rubocop.yml +1 -5
  4. data/.travis.yml +3 -0
  5. data/CHANGELOG.md +79 -0
  6. data/CONTRIBUTING.md +9 -10
  7. data/README.md +5 -5
  8. data/ci/README.md +5 -10
  9. data/ci/credentials.yml.template +28 -0
  10. data/ci/pipeline.yml +72 -11
  11. data/ci/tasks/run-int.sh +2 -1
  12. data/ci/tasks/run-int.yml +1 -0
  13. data/examples/create_instance.rb +4 -4
  14. data/examples/get_list_images.rb +1 -1
  15. data/examples/load-balance.rb +1 -1
  16. data/examples/metadata.rb +1 -1
  17. data/examples/network.rb +1 -1
  18. data/examples/storage_json.rb +1 -1
  19. data/fog-google.gemspec +3 -1
  20. data/lib/fog/compute/google.rb +2 -0
  21. data/lib/fog/compute/google/mock.rb +0 -19
  22. data/lib/fog/compute/google/models/disk.rb +12 -5
  23. data/lib/fog/compute/google/models/instance_group.rb +4 -0
  24. data/lib/fog/compute/google/models/network.rb +8 -2
  25. data/lib/fog/compute/google/models/server.rb +28 -4
  26. data/lib/fog/compute/google/models/servers.rb +1 -0
  27. data/lib/fog/compute/google/models/subnetworks.rb +1 -1
  28. data/lib/fog/compute/google/models/target_pool.rb +12 -1
  29. data/lib/fog/compute/google/requests/insert_disk.rb +12 -5
  30. data/lib/fog/compute/google/requests/insert_server.rb +6 -1
  31. data/lib/fog/compute/google/requests/insert_url_map.rb +12 -1
  32. data/lib/fog/compute/google/requests/set_server_metadata.rb +2 -0
  33. data/lib/fog/google/shared.rb +1 -2
  34. data/lib/fog/google/version.rb +1 -1
  35. data/lib/fog/storage/google_json/models/file.rb +31 -1
  36. data/lib/fog/storage/google_json/requests/put_object.rb +22 -12
  37. data/lib/fog/storage/google_xml/models/directory.rb +2 -3
  38. data/lib/fog/storage/google_xml/models/file.rb +2 -13
  39. data/lib/fog/storage/google_xml/requests/put_bucket.rb +1 -1
  40. data/lib/fog/storage/google_xml/requests/put_object.rb +1 -1
  41. data/lib/fog/storage/google_xml/requests/put_object_acl.rb +11 -2
  42. data/lib/fog/storage/google_xml/utils.rb +11 -0
  43. data/tasks/test.rake +63 -1
  44. data/test/integration/compute/addresses/addresses_shared.rb +1 -1
  45. data/test/integration/compute/test_compute_addresses_collection.rb +4 -3
  46. data/test/integration/compute/test_compute_networks_collection.rb +9 -6
  47. data/test/integration/compute/test_servers.rb +9 -0
  48. data/test/integration/compute/test_target_pools.rb +22 -0
  49. data/test/integration/factories/collection_factory.rb +1 -1
  50. data/test/integration/monitoring/test_timeseries.rb +78 -28
  51. data/test/integration/storage/test_files.rb +1 -1
  52. data/test/integration/storage/test_objects.rb +6 -0
  53. data/test/integration/test_authentication.rb +0 -18
  54. data/test/unit/compute/test_common_collections.rb +31 -0
  55. data/test/unit/compute/test_common_models.rb +36 -0
  56. metadata +39 -6
  57. data/ci/credentials.yml.tpl +0 -13
data/lib/fog/storage/google_json/requests/put_object.rb CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  module Fog
  module Storage
  class GoogleJSON
@@ -46,18 +48,7 @@ module Fog
  kms_key_name: nil,
  predefined_acl: nil,
  **options)
- if data.is_a?(String)
- data = StringIO.new(data)
- options[:content_type] ||= "text/plain"
- elsif data.is_a?(::File)
- options[:content_type] ||= Fog::Storage.parse_data(data)[:headers]["Content-Type"]
- end
-
- # Paperclip::AbstractAdapter
- if data.respond_to?(:content_type) && data.respond_to?(:path)
- options[:content_type] ||= data.content_type
- data = data.path
- end
+ data, options = normalize_data(data, options)

  object_config = ::Google::Apis::StorageV1::Object.new(
  options.merge(:name => object_name)
@@ -78,6 +69,25 @@ module Fog
  :upload_source => data
  )
  end
+
+ protected
+
+ def normalize_data(data, options)
+ raise ArgumentError.new("data is required") unless data
+ if data.is_a?(String)
+ data = StringIO.new(data)
+ options[:content_type] ||= "text/plain"
+ elsif data.is_a?(::File)
+ options[:content_type] ||= Fog::Storage.parse_data(data)[:headers]["Content-Type"]
+ end
+
+ # Paperclip::AbstractAdapter
+ if data.respond_to?(:content_type) && data.respond_to?(:path)
+ options[:content_type] ||= data.content_type
+ data = data.path
+ end
+ [data, options]
+ end
  end

  class Mock
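
The refactor above is behavior-preserving: it moves the existing input coercion into a protected normalize_data helper and adds an explicit ArgumentError for nil data. As a rough usage sketch of the three accepted input shapes (the connection settings, bucket name, and `attachment` object below are placeholders, not part of this diff):

    require "fog/google"

    # Placeholder connection; use whatever storage credentials your project already configures.
    storage = Fog::Storage::Google.new(
      :google_project           => "my-project",
      :google_json_key_location => "/path/to/key.json"
    )

    # A String is wrapped in StringIO and defaults to Content-Type "text/plain".
    storage.put_object("my-bucket", "hello.txt", "hello world")

    # A ::File handle has its Content-Type sniffed via Fog::Storage.parse_data.
    ::File.open("logo.png", "rb") { |f| storage.put_object("my-bucket", "logo.png", f) }

    # Paperclip-style adapters (responding to #content_type and #path) are uploaded
    # from their backing path with their reported content type.
    storage.put_object("my-bucket", "avatar.jpg", attachment)  # `attachment` is hypothetical

    # nil data now raises ArgumentError up front.
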
data/lib/fog/storage/google_xml/models/directory.rb CHANGED
@@ -7,9 +7,8 @@ module Fog
  attribute :creation_date, :aliases => "CreationDate"

  def acl=(new_acl)
- valid_acls = ["private", "public-read", "public-read-write", "authenticated-read"]
- unless valid_acls.include?(new_acl)
- raise ArgumentError.new("acl must be one of [#{valid_acls.join(', ')}]")
+ unless Utils::VALID_ACLS.include?(new_acl)
+ raise ArgumentError.new("acl must be one of [#{Utils::VALID_ACLS.join(', ')}]")
  end
  @acl = new_acl
  end
data/lib/fog/storage/google_xml/models/file.rb CHANGED
@@ -17,20 +17,9 @@ module Fog
  attribute :owner, :aliases => "Owner"
  attribute :storage_class, :aliases => ["x-goog-storage-class", "StorageClass"]

- # https://cloud.google.com/storage/docs/access-control#predefined-acl
- VALID_ACLS = [
- "authenticated-read",
- "bucket-owner-full-control",
- "bucket-owner-read",
- "private",
- "project-private",
- "public-read",
- "public-read-write"
- ].freeze
-
  def acl=(new_acl)
- unless VALID_ACLS.include?(new_acl)
- raise ArgumentError.new("acl must be one of [#{VALID_ACLS.join(', ')}]")
+ unless Utils::VALID_ACLS.include?(new_acl)
+ raise ArgumentError.new("acl must be one of [#{Utils::VALID_ACLS.join(', ')}]")
  end
  @acl = new_acl
  end
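
With both models now delegating to the shared Utils::VALID_ACLS constant introduced in utils.rb below, the XML-API directory model accepts the full predefined-ACL set (including "project-private", "bucket-owner-read", and "bucket-owner-full-control") rather than the shorter hard-coded list it used before. A small sketch, assuming `directory` is an existing Fog::Storage::GoogleXML directory model (a placeholder here):

    directory.acl = "project-private"    # accepted now that the shared list is used
    directory.acl = "public-read"        # still accepted, as before
    directory.acl = "not-an-acl"         # raises ArgumentError listing Utils::VALID_ACLS
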
data/lib/fog/storage/google_xml/requests/put_bucket.rb CHANGED
@@ -38,7 +38,7 @@ module Fog
  class Mock
  def put_bucket(bucket_name, options = {})
  acl = options["x-goog-acl"] || "private"
- if !["private", "public-read", "public-read-write", "authenticated-read"].include?(acl)
+ if !Utils::VALID_ACLS.include?(acl)
  raise Excon::Errors::BadRequest.new("invalid x-goog-acl")
  else
  data[:acls][:bucket][bucket_name] = self.class.acls(options[acl])
data/lib/fog/storage/google_xml/requests/put_object.rb CHANGED
@@ -38,7 +38,7 @@ module Fog
  class Mock
  def put_object(bucket_name, object_name, data, options = {})
  acl = options["x-goog-acl"] || "private"
- if !["private", "public-read", "public-read-write", "authenticated-read"].include?(acl)
+ if !Utils::VALID_ACLS.include?(acl)
  raise Excon::Errors::BadRequest.new("invalid x-goog-acl")
  else
  self.data[:acls][:object][bucket_name] ||= {}
data/lib/fog/storage/google_xml/requests/put_object_acl.rb CHANGED
@@ -24,7 +24,11 @@ module Fog
  end

  def put_object_acl(bucket_name, object_name, acl)
- data = <<-DATA
+ headers = {}
+ data = ""
+
+ if acl.is_a?(Hash)
+ data = <<-DATA
  <AccessControlList>
  <Owner>
  #{tag('ID', acl['Owner']['ID'])}
@@ -34,10 +38,15 @@ module Fog
  </Entries>
  </AccessControlList>
  DATA
+ elsif acl.is_a?(String) && Utils::VALID_ACLS.include?(acl)
+ headers["x-goog-acl"] = acl
+ else
+ raise Excon::Errors::BadRequest.new("invalid x-goog-acl")
+ end

  request(:body => data,
  :expects => 200,
- :headers => {},
+ :headers => headers,
  :host => "#{bucket_name}.#{@host}",
  :method => "PUT",
  :query => { "acl" => nil },
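
put_object_acl therefore accepts either the existing ACL hash (still serialized into the XML body) or one of the predefined ACL strings, which is validated against the shared list and sent as an x-goog-acl header instead. A hedged sketch of the two call styles, assuming an XML-API connection in `storage` and an existing object (placeholders):

    # Predefined ACL string: empty body, "x-goog-acl" request header.
    storage.put_object_acl("my-bucket", "hello.txt", "public-read")

    # Full ACL hash: unchanged behaviour, serialized into the XML body
    # (same shape as before, e.g. "Owner" => { "ID" => ... }).
    storage.put_object_acl("my-bucket", "hello.txt", acl_hash)

    # Anything else is rejected up front.
    storage.put_object_acl("my-bucket", "hello.txt", "not-an-acl")  # => Excon::Errors::BadRequest
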
data/lib/fog/storage/google_xml/utils.rb CHANGED
@@ -2,6 +2,17 @@ module Fog
  module Storage
  class GoogleXML
  module Utils
+ # https://cloud.google.com/storage/docs/access-control#predefined-acl
+ VALID_ACLS = [
+ "authenticated-read",
+ "bucket-owner-full-control",
+ "bucket-owner-read",
+ "private",
+ "project-private",
+ "public-read",
+ "public-read-write"
+ ].freeze
+
  def http_url(params, expires)
  "http://" << host_path_query(params, expires)
  end
data/tasks/test.rake CHANGED
@@ -1,6 +1,7 @@
  require "rake/testtask"

  Rake::TestTask.new do |t|
+ t.description = "Run integration and unit tests"
  t.libs << "test"
  t.pattern = File.join("test", "**", "test_*.rb")
  t.warning = false
@@ -9,6 +10,67 @@ end
  namespace :test do
  mock = ENV["FOG_MOCK"] || "true"
  task :travis do
- sh("export FOG_MOCK=#{mock} && bundle exec shindont")
+ sh("bundle exec rake test:unit")
+ end
+
+ desc "Run all integration tests in parallel"
+ multitask :parallel => ["test:compute",
+ "test:monitoring",
+ "test:pubsub",
+ "test:sql",
+ "test:storage"]
+
+ Rake::TestTask.new do |t|
+ t.name = "unit"
+ t.description = "Run Unit tests"
+ t.libs << "test"
+ t.pattern = FileList['test/unit/**/test_*.rb']
+ t.warning = false
+ t.verbose = true
+ end
+
+ Rake::TestTask.new do |t|
+ t.name = "compute"
+ t.description = "Run Compute API tests"
+ t.libs << "test"
+ t.pattern = FileList['test/integration/compute/test_*.rb']
+ t.warning = false
+ t.verbose = true
+ end
+
+ Rake::TestTask.new do |t|
+ t.name = "monitoring"
+ t.description = "Run Monitoring API tests"
+ t.libs << "test"
+ t.pattern = FileList['test/integration/monitoring/test_*.rb']
+ t.warning = false
+ t.verbose = true
+ end
+
+ Rake::TestTask.new do |t|
+ t.name = "pubsub"
+ t.description = "Run PubSub API tests"
+ t.libs << "test"
+ t.pattern = FileList['test/integration/pubsub/test_*.rb']
+ t.warning = false
+ t.verbose = true
+ end
+
+ Rake::TestTask.new do |t|
+ t.name = "sql"
+ t.description = "Run SQL API tests"
+ t.libs << "test"
+ t.pattern = FileList['test/integration/sql/test_*.rb']
+ t.warning = false
+ t.verbose = true
+ end
+
+ Rake::TestTask.new do |t|
+ t.name = "storage"
+ t.description = "Run Storage API tests"
+ t.libs << "test"
+ t.pattern = FileList['test/integration/storage/test_*.rb']
+ t.warning = false
+ t.verbose = true
  end
  end
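
The net effect is that `test:travis` now runs only the unit suite, while each integration suite gets its own task and `test:parallel` fans them out via multitask. Typical invocations, assuming the gem's Rakefile loads this file:

    bundle exec rake test:unit       # what test:travis shells out to
    bundle exec rake test:compute    # a single integration suite
    bundle exec rake test:parallel   # all integration suites, run concurrently
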
data/test/integration/compute/addresses/addresses_shared.rb CHANGED
@@ -4,7 +4,7 @@ require "securerandom"

  class TestComputeAddressShared < FogIntegrationTest
  DEFAULT_REGION = "us-central1".freeze
- ADDRESS_RESOURCE_PREFIX = "fog-test-address".freeze
+ ADDRESS_RESOURCE_PREFIX = "fog-int-test-address".freeze

  include ClientHelper

data/test/integration/compute/test_compute_addresses_collection.rb CHANGED
@@ -5,12 +5,13 @@ class TestComputeAddressesCollection < FogIntegrationTest
  DEFAULT_REGION = "us-central1".freeze
  DEFAULT_ZONE = "us-central1-b".freeze
  RESOURCE_PREFIX = "fog-test-addresscol".freeze
+ TEST_ASYNC = false

  # Ensure we clean up any created resources
  Minitest.after_run do
  client = Fog::Compute::Google.new
- client.addresses.each { |a| a.destroy if a.name.start_with?(RESOURCE_PREFIX) }
- client.servers.each { |s| s.destroy if s.name.start_with?(RESOURCE_PREFIX) }
+ client.addresses.each { |a| a.destroy(TEST_ASYNC) if a.name.start_with?(RESOURCE_PREFIX) }
+ client.servers.each { |s| s.destroy(TEST_ASYNC) if s.name.start_with?(RESOURCE_PREFIX) }
  end

  def test_address_workflow
@@ -50,7 +51,7 @@ class TestComputeAddressesCollection < FogIntegrationTest
  ],
  :external_ip => my_address.address
  )
- my_server.wait_for { provisioning? }
+ my_server.wait_for { staging? }

  # And verify that it's correctly assigned
  assert_equal(
data/test/integration/compute/test_compute_networks_collection.rb CHANGED
@@ -5,12 +5,13 @@ class TestComputeNetworksCollection < FogIntegrationTest
  DEFAULT_REGION = "us-central1".freeze
  DEFAULT_ZONE = "us-central1-b".freeze
  RESOURCE_PREFIX = "fog-test-networkscol".freeze
+ TEST_ASYNC = false

  # Ensure we clean up any created resources
  Minitest.after_run do
  client = Fog::Compute::Google.new
- client.networks.each { |a| a.destroy if a.name.start_with?(RESOURCE_PREFIX) }
- client.servers.each { |s| s.destroy if s.name.start_with?(RESOURCE_PREFIX) }
+ client.networks.each { |a| a.destroy(TEST_ASYNC) if a.name.start_with?(RESOURCE_PREFIX) }
+ client.servers.each { |s| s.destroy(TEST_ASYNC) if s.name.start_with?(RESOURCE_PREFIX) }
  end

  def test_network_workflow
@@ -30,14 +31,15 @@ class TestComputeNetworksCollection < FogIntegrationTest

  # Be aware that although the address resource is created, it might not yet
  # have an ip address. You can poll until the address has been assigned.
- my_network.wait_for { !my_network.ipv4_range.nil? }
+ my_network.wait_for(60) { !my_network.ipv4_range.nil? }
  assert_match(/\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\/\d{1,2}/,
  my_network.ipv4_range,
  "My address's address should have a valid ipv4 address")

  # Now that we have an address, we can create a server using the static ip
  server_name = new_resource_name
- client.servers.create(
+
+ my_server = client.servers.create(
  :name => server_name,
  :machine_type => "f1-micro",
  :zone => client.zones.get(DEFAULT_ZONE).self_link,
@@ -49,9 +51,10 @@ class TestComputeNetworksCollection < FogIntegrationTest
  }
  ],
  :network_interfaces => [my_network.get_as_interface_config]
- ).wait_for { ready? }
+ )
+
+ my_server.wait_for { ready? }

- my_server = client.servers.get(server_name, DEFAULT_ZONE)
  # We need to verify that the network has been correctly assigned
  assert_equal(
  my_network.self_link,
data/test/integration/compute/test_servers.rb CHANGED
@@ -4,8 +4,17 @@ require "integration/factories/servers_factory"
  class TestServers < FogIntegrationTest
  include TestCollection

+ # Cleanup is handled by TestCollection
  def setup
  @subject = Fog::Compute[:google].servers
  @factory = ServersFactory.new(namespaced_name)
  end
+
+ def test_set_metadata
+ server = @factory.create
+ server.wait_for { ready? }
+ server.set_metadata({ "foo" => "bar", "baz" => "foo" }, false)
+ assert_equal [{ :key => "foo", :value => "bar" },
+ { :key=>"baz", :value=>"foo" }], server.metadata[:items]
+ end
  end
data/test/integration/compute/test_target_pools.rb CHANGED
@@ -7,10 +7,32 @@ class TestTargetPools < FogIntegrationTest
  def setup
  @subject = Fog::Compute[:google].target_pools
  @factory = TargetPoolsFactory.new(namespaced_name)
+ @servers = Fog::Compute[:google].servers
  end

  # Override to include zone in get request
  def get_resource(identity)
  @subject.get(identity, TEST_ZONE)
  end
+
+ def test_get_health
+ target_pool = @factory.create
+ server = @servers.get(target_pool.instances[0].split("/").last)
+ server.wait_for { ready? }
+
+ # There's no way to track the readiness of the instance resource in a target pool,
+ # so wrapping in a soft retry:
+ begin
+ retries ||= 0
+ target_pool.get_health
+ rescue ::Google::Apis::ClientError
+ sleep 25
+ retry if (retries += 1) < 3
+ end
+
+ assert_equal(target_pool.get_health[server.self_link][0][:instance], server.self_link,
+ "target_pool should return instance health details")
+ assert_equal(target_pool.get_health(server.name)[server.self_link][0][:instance], server.self_link,
+ "target_pool should return instance health details when an instance is specified")
+ end
  end
data/test/integration/factories/collection_factory.rb CHANGED
@@ -7,7 +7,7 @@ class CollectionFactory
  @resource_counter = 0
  end

- def cleanup(async = true)
+ def cleanup(async = false)
  resources = @subject.all.select { |resource| resource.name.start_with? PREFIX }
  resources.each { |r| r.destroy(async) }
  resources.each { |r| Fog.wait_for { !@subject.all.map(&:identity).include? r.identity } }
data/test/integration/monitoring/test_timeseries.rb CHANGED
@@ -1,6 +1,13 @@
  require "helpers/integration_test_helper"
+ require "retriable"

  class TestMetricDescriptors < FogIntegrationTest
+ # Retriable is used to wrap each request in this test due to Stackdriver API being slow with
+ # metric propagation (sometimes 80+ seconds) and client returning
+ # Google::Apis::ClientError: badRequest if the metric hasn't yet been created instead of a 404.
+ NOT_READY_REGEX = /The provided filter doesn't refer to any known metric./
+ RETRIABLE_TRIES = 3
+ RETRIABLE_BASE_INTERVAL = 30
  TEST_METRIC_PREFIX = "custom.googleapis.com/fog-google-test/timeseries".freeze
  LABEL_DESCRIPTORS = [
  {
@@ -44,13 +51,37 @@ class TestMetricDescriptors < FogIntegrationTest
  resp = @client.create_timeseries(:timeseries => [expected])
  assert_empty(resp.to_h)

- series = @client.timeseries_collection.all(
- :filter => "metric.type = \"#{metric_type}\"",
- :interval => {
- :start_time => start_time.to_datetime.rfc3339,
- :end_time => Time.now.to_datetime.rfc3339
- }
- )
+ # Wait for metric to be created
+ Retriable.retriable(on: {Google::Apis::ClientError => NOT_READY_REGEX},
+ tries: RETRIABLE_TRIES,
+ base_interval: RETRIABLE_BASE_INTERVAL) do
+ @client.list_timeseries(
+ :filter => "metric.type = \"#{metric_type}\"",
+ :interval => {
+ # Subtracting one second because timeSeries.list API
+ # doesn't return points that are exactly the same time
+ # as the interval for some reason.
+ :start_time => (start_time - 1).to_datetime.rfc3339,
+ :end_time => Time.now.to_datetime.rfc3339
+ }
+ ).time_series
+ end
+
+ series = Retriable.retriable(on: { Google::Apis::ClientError => NOT_READY_REGEX },
+ tries: RETRIABLE_TRIES,
+ base_interval: RETRIABLE_BASE_INTERVAL) do
+ @client.timeseries_collection.all(
+ :filter => "metric.type = \"#{metric_type}\"",
+ :interval => {
+ # Subtracting one second because timeSeries.list API
+ # doesn't return points that are exactly the same time
+ # as the interval for some reason.
+ :start_time => (start_time - 1).to_datetime.rfc3339,
+ :end_time => Time.now.to_datetime.rfc3339
+ }
+ )
+ end
+
  assert_equal(1, series.size)
  actual = series.first
  assert_equal(expected[:metric], actual.metric)
@@ -84,28 +115,42 @@ class TestMetricDescriptors < FogIntegrationTest
  _some_timeseries(start_time, metric_type, labels)
  end

- @client.create_timeseries(:timeseries => timeseries)
+ Retriable.retriable(on: Google::Apis::ServerError,
+ tries: RETRIABLE_TRIES,
+ base_interval: RETRIABLE_BASE_INTERVAL) do
+ @client.create_timeseries(:timeseries => timeseries)
+ end
  interval = {
- :start_time => start_time.to_datetime.rfc3339,
+ # Subtracting one second because timeSeries.list API
+ # doesn't return points that are exactly the same time
+ # as the interval for some reason.
+ :start_time => (start_time - 1).to_datetime.rfc3339,
  :end_time => Time.now.to_datetime.rfc3339
  }

- # Wait for creation
- Fog.wait_for(30) do
- # Test all created timeseries are returned.
- list_result = @client.list_timeseries(
+
+ # Wait for metric to be created
+ # Retriable is used instead of wait_for due to API client returning Google::Apis::ClientError: badRequest if the
+ # metric hasn't yet been created
+ Retriable.retriable(on: { Google::Apis::ClientError => NOT_READY_REGEX },
+ tries: RETRIABLE_TRIES,
+ base_interval: RETRIABLE_BASE_INTERVAL) do
+ @client.list_timeseries(
  :filter => "metric.type = \"#{metric_type}\"",
  :interval => interval
  ).time_series
- !list_result.nil? && list_result.size == timeseries.size
  end

  # Test page size
- resp = @client.list_timeseries(
- :filter => "metric.type = \"#{metric_type}\"",
- :interval => interval,
- :page_size => 1
- )
+ resp = Retriable.retriable(on: { Google::Apis::ClientError => NOT_READY_REGEX },
+ tries: RETRIABLE_TRIES,
+ base_interval: RETRIABLE_BASE_INTERVAL) do
+ @client.list_timeseries(
+ :filter => "metric.type = \"#{metric_type}\"",
+ :interval => interval,
+ :page_size => 1
+ )
+ end
  assert_equal(resp.time_series.size, 1,
  "expected timeseries count to be equal to page size 1")

@@ -123,13 +168,17 @@ class TestMetricDescriptors < FogIntegrationTest
  "expected different timeseries when using page_token")

  # Test filter
- series = @client.timeseries_collection.all(
- :filter => %[
- metric.type = "#{metric_type}" AND
- metric.label.test_string_label = "first"
- ],
- :interval => interval
- )
+ series = Retriable.retriable(on: { Google::Apis::ClientError => NOT_READY_REGEX },
+ tries: RETRIABLE_TRIES,
+ base_interval: RETRIABLE_BASE_INTERVAL) do
+ @client.timeseries_collection.all(
+ :filter => %[
+ metric.type = "#{metric_type}" AND
+ metric.label.test_string_label = "first"
+ ],
+ :interval => interval
+ )
+ end
  assert_equal(series.size, 1,
  "expected returned timeseries to be filtered to 1 value")
  assert_equal("true", series.first.metric[:labels][:test_bool_label])
@@ -138,7 +187,8 @@ class TestMetricDescriptors < FogIntegrationTest

  def _delete_test_resources
  list_resp = @client.monitoring.list_project_metric_descriptors(
- :filter => "metric.type = starts_with(\"#{TEST_METRIC_PREFIX}\")"
+ "projects/#{@client.project}",
+ filter: "metric.type = starts_with(\"#{TEST_METRIC_PREFIX}\")"
  )
  unless list_resp.metric_descriptors.nil?
  puts "Found #{list_resp.metric_descriptors.size} test metric descriptors."
@@ -164,7 +214,7 @@ class TestMetricDescriptors < FogIntegrationTest
  )

  # Wait for metric descriptor to be created
- Fog.wait_for(30, 2) do
+ Fog.wait_for(180, 2) do
  begin
  @client.get_metric_descriptor(metric_type)
  true
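
The retry wrapper used throughout this test follows the retriable gem's API: `on:` may map an exception class to a message pattern, so only the "metric not yet visible" flavour of Google::Apis::ClientError is retried, and the block's return value is passed through to the caller. A condensed sketch of the pattern (names other than the Retriable API itself are placeholders):

    require "retriable"

    NOT_READY = /The provided filter doesn't refer to any known metric./

    series = Retriable.retriable(on: { Google::Apis::ClientError => NOT_READY },
                                 tries: 3, base_interval: 30) do
      # ClientErrors whose message does not match NOT_READY are re-raised
      # immediately; matching ones are retried with backoff up to `tries` times.
      fetch_timeseries  # placeholder for the list_timeseries / timeseries_collection.all calls above
    end
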