gcloud 0.10.0 → 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +8 -8
- data/CHANGELOG.md +36 -0
- data/lib/gcloud/backoff.rb +5 -5
- data/lib/gcloud/bigquery.rb +24 -0
- data/lib/gcloud/bigquery/connection.rb +32 -25
- data/lib/gcloud/bigquery/data.rb +99 -1
- data/lib/gcloud/bigquery/dataset.rb +5 -13
- data/lib/gcloud/bigquery/dataset/list.rb +124 -2
- data/lib/gcloud/bigquery/job/list.rb +125 -2
- data/lib/gcloud/bigquery/project.rb +30 -27
- data/lib/gcloud/bigquery/query_data.rb +102 -1
- data/lib/gcloud/bigquery/table.rb +17 -2
- data/lib/gcloud/bigquery/table/list.rb +132 -3
- data/lib/gcloud/datastore.rb +30 -19
- data/lib/gcloud/datastore/dataset.rb +2 -22
- data/lib/gcloud/datastore/dataset/lookup_results.rb +160 -4
- data/lib/gcloud/datastore/dataset/query_results.rb +229 -23
- data/lib/gcloud/datastore/transaction.rb +2 -5
- data/lib/gcloud/dns.rb +20 -0
- data/lib/gcloud/dns/change/list.rb +109 -6
- data/lib/gcloud/dns/connection.rb +18 -9
- data/lib/gcloud/dns/project.rb +4 -8
- data/lib/gcloud/dns/record/list.rb +96 -13
- data/lib/gcloud/dns/zone.rb +9 -24
- data/lib/gcloud/dns/zone/list.rb +102 -5
- data/lib/gcloud/dns/zone/transaction.rb +1 -1
- data/lib/gcloud/logging.rb +19 -0
- data/lib/gcloud/logging/entry/list.rb +83 -14
- data/lib/gcloud/logging/metric/list.rb +89 -12
- data/lib/gcloud/logging/project.rb +18 -30
- data/lib/gcloud/logging/resource_descriptor/list.rb +105 -6
- data/lib/gcloud/logging/sink/list.rb +89 -12
- data/lib/gcloud/pubsub.rb +23 -0
- data/lib/gcloud/pubsub/project.rb +21 -29
- data/lib/gcloud/pubsub/service.rb +1 -3
- data/lib/gcloud/pubsub/subscription/list.rb +167 -13
- data/lib/gcloud/pubsub/topic.rb +15 -13
- data/lib/gcloud/pubsub/topic/batch.rb +10 -4
- data/lib/gcloud/pubsub/topic/list.rb +134 -8
- data/lib/gcloud/resource_manager.rb +24 -0
- data/lib/gcloud/resource_manager/connection.rb +18 -9
- data/lib/gcloud/resource_manager/manager.rb +7 -4
- data/lib/gcloud/resource_manager/project/list.rb +93 -14
- data/lib/gcloud/storage.rb +63 -0
- data/lib/gcloud/storage/bucket.rb +100 -61
- data/lib/gcloud/storage/bucket/list.rb +132 -8
- data/lib/gcloud/storage/connection.rb +68 -44
- data/lib/gcloud/storage/errors.rb +9 -3
- data/lib/gcloud/storage/file.rb +48 -4
- data/lib/gcloud/storage/file/list.rb +151 -15
- data/lib/gcloud/storage/file/verifier.rb +3 -3
- data/lib/gcloud/storage/project.rb +15 -30
- data/lib/gcloud/translate.rb +20 -0
- data/lib/gcloud/translate/connection.rb +12 -3
- data/lib/gcloud/version.rb +1 -1
- data/lib/gcloud/vision.rb +20 -0
- data/lib/gcloud/vision/connection.rb +10 -1
- data/lib/gcloud/vision/image.rb +15 -18
- metadata +16 -2
data/lib/gcloud/storage.rb
CHANGED
```diff
@@ -240,6 +240,45 @@ module Gcloud
 #                    "avatars/heidi/400x400.png"
 # ```
 #
+# ### Customer-supplied encryption keys
+#
+# By default, Google Cloud Storage manages server-side encryption keys on
+# your behalf. However, a [customer-supplied encryption
+# key](https://cloud.google.com/storage/docs/encryption#customer-supplied)
+# can be provided with the `encryption_key` and `encryption_key_sha256`
+# options. If given, the same key and SHA256 hash also must be provided to
+# subsequently download or copy the file. If you use customer-supplied
+# encryption keys, you must securely manage your keys and ensure that they are
+# not lost. Also, please note that file metadata is not encrypted, with the
+# exception of the CRC32C checksum and MD5 hash. The names of files and
+# buckets are also not encrypted, and you can read or update the metadata of
+# an encrypted file without providing the encryption key.
+#
+# ```ruby
+# require "gcloud"
+# require "digest/sha2"
+#
+# gcloud = Gcloud.new
+# storage = gcloud.storage
+# bucket = storage.bucket "my-todo-app"
+#
+# # Key generation shown for example purposes only. Write your own.
+# cipher = OpenSSL::Cipher.new "aes-256-cfb"
+# cipher.encrypt
+# key = cipher.random_key
+# key_hash = Digest::SHA256.digest key
+#
+# bucket.create_file "/var/todo-app/avatars/heidi/400x400.png",
+#                    "avatars/heidi/400x400.png",
+#                    encryption_key: key,
+#                    encryption_key_sha256: key_hash
+#
+# # Store your key and hash securely for later use.
+# file = bucket.file "avatars/heidi/400x400.png",
+#                    encryption_key: key,
+#                    encryption_key_sha256: key_hash
+# ```
+#
 # ### A note about large uploads
 #
 # You may encounter a Broken pipe (Errno::EPIPE) error when attempting to
```
```diff
@@ -405,6 +444,30 @@ module Gcloud
 # file.acl.public!
 # ```
 #
+# ## Configuring Backoff
+#
+# The {Gcloud::Backoff} class allows users to globally configure how Cloud API
+# requests are automatically retried in the case of some errors, such as a
+# `500` or `503` status code, or a specific internal error code such as
+# `rateLimitExceeded`.
+#
+# If an API call fails, the response will be inspected to see if the call
+# should be retried. If the response matches the criteria, then the request
+# will be retried after a delay. If another error occurs, the delay will be
+# increased incrementally before a subsequent attempt. The first retry will be
+# delayed one second, the second retry two seconds, and so on.
+#
+# ```ruby
+# require "gcloud"
+# require "gcloud/backoff"
+#
+# Gcloud::Backoff.retries = 5 # Raise the maximum number of retries from 3
+# ```
+#
+# See the [Storage status and error
+# codes](https://cloud.google.com/storage/docs/json_api/v1/status-codes)
+# for a list of error conditions.
+#
   module Storage
   end
 end
```
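The retry behavior described above (each retry waits one second longer than the previous one) can be pictured with a small standalone sketch. This is not the gem's `Gcloud::Backoff` implementation; the `with_incremental_backoff` helper below is illustrative only.

```ruby
# Illustrative only: mimics the documented incremental delay, where the
# first retry waits 1 second, the second 2 seconds, and so on.
def with_incremental_backoff retries: 3
  attempts = 0
  begin
    yield
  rescue StandardError => error
    attempts += 1
    raise error if attempts > retries
    sleep attempts # 1s after the first failure, 2s after the second, ...
    retry
  end
end

with_incremental_backoff retries: 5 do
  # make the Storage API call here
end
```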
data/lib/gcloud/storage/bucket.rb
CHANGED
```diff
@@ -326,11 +326,7 @@ module Gcloud
 # The bucket must be empty before it can be deleted.
 #
 # The API call to delete the bucket may be retried under certain
-# conditions. See {Gcloud::Backoff} to control this behavior
-# specify the wanted behavior using the `retries` option.
-#
-# @param [Integer] retries The number of times the API call should be
-#   retried. Default is Gcloud::Backoff.retries.
+# conditions. See {Gcloud::Backoff} to control this behavior.
 #
 # @return [Boolean] Returns `true` if the bucket was deleted.
 #
```
```diff
@@ -343,19 +339,9 @@ module Gcloud
 #   bucket = storage.bucket "my-bucket"
 #   bucket.delete
 #
-
-#   require "gcloud"
-#
-#   gcloud = Gcloud.new
-#   storage = gcloud.storage
-#
-#   bucket = storage.bucket "my-bucket"
-#   bucket.delete retries: 5
-#
-def delete retries: nil
+def delete
   ensure_connection!
-
-  resp = connection.delete_bucket name, options
+  resp = connection.delete_bucket name
   if resp.success?
     true
   else
```
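With the per-call `retries:` option removed from `Bucket#delete`, retry counts come from the global `Gcloud::Backoff` configuration introduced earlier in this diff. A usage sketch of the equivalent call after this change:

```ruby
require "gcloud"
require "gcloud/backoff"

# Retries are now configured once, globally, rather than per call.
Gcloud::Backoff.retries = 5

gcloud = Gcloud.new
storage = gcloud.storage

bucket = storage.bucket "my-bucket"
bucket.delete # retried per Gcloud::Backoff on eligible errors
```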
```diff
@@ -399,24 +385,16 @@ module Gcloud
 #     puts file.name
 #   end
 #
-# @example
+# @example Retrieve all files: (See {File::List#all})
 #   require "gcloud"
 #
 #   gcloud = Gcloud.new
 #   storage = gcloud.storage
 #
 #   bucket = storage.bucket "my-bucket"
-#
-#
-#
-#   while tmp_files.any? do
-#     tmp_files.each do |file|
-#       all_files << file
-#     end
-#     # break loop if no more buckets available
-#     break if tmp_files.token.nil?
-#     # get the next group of files
-#     tmp_files = bucket.files token: tmp_files.token
+#   files = bucket.files
+#   files.all do |file|
+#     puts file.name
 #   end
 #
 def files prefix: nil, delimiter: nil, token: nil, max: nil, versions: nil
```
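The rewritten example relies on the new `File::List#all` helper. A usage sketch, assuming `File::List` exposes the same `all`/`next?`/`next` surface that `Bucket::List` gains later in this diff:

```ruby
require "gcloud"

gcloud = Gcloud.new
storage = gcloud.storage
bucket = storage.bucket "my-bucket"

# Let the list follow page tokens on its own.
bucket.files.all do |file|
  puts file.name
end

# Or walk the pages explicitly.
files = bucket.files
while files
  files.each { |file| puts file.name }
  files = files.next? ? files.next : nil
end
```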
```diff
@@ -429,20 +407,32 @@ module Gcloud
   versions: versions
 }
 resp = connection.list_files name, options
-
-
-
-  fail ApiError.from_response(resp)
-end
+fail ApiError.from_response(resp) unless resp.success?
+File::List.from_response resp, connection, name, prefix, delimiter, max,
+                         versions
 end
 alias_method :find_files, :files
 
 ##
 # Retrieves a file matching the path.
 #
+# If a [customer-supplied encryption
+# key](https://cloud.google.com/storage/docs/encryption#customer-supplied)
+# was used with {#create_file}, the `encryption_key` and
+# `encryption_key_sha256` options must be provided or else the file's
+# CRC32C checksum and MD5 hash will not be returned.
+#
 # @param [String] path Name (path) of the file.
 # @param [Integer] generation When present, selects a specific revision of
 #   this object. Default is the latest version.
+# @param [String] encryption_key Optional. The customer-supplied, AES-256
+#   encryption key used to encrypt the file, if one was provided to
+#   {#create_file}. Must be provided if `encryption_key_sha256` is
+#   provided.
+# @param [String] encryption_key_sha256 Optional. The SHA256 hash of the
+#   customer-supplied, AES-256 encryption key used to encrypt the file, if
+#   one was provided to {#create_file}. Must be provided if
+#   `encryption_key` is provided.
 #
 # @return [Gcloud::Storage::File, nil] Returns nil if file does not exist
 #
```
```diff
@@ -457,9 +447,11 @@ module Gcloud
 #   file = bucket.file "path/to/my-file.ext"
 #   puts file.name
 #
-def file path, generation: nil
+def file path, generation: nil, encryption_key: nil,
+         encryption_key_sha256: nil
   ensure_connection!
-  options = { generation: generation }
+  options = { generation: generation, encryption_key: encryption_key,
+              encryption_key_sha256: encryption_key_sha256 }
   resp = connection.get_file name, path, options
   if resp.success?
     File.from_gapi resp.data, connection
```
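A hypothetical illustration of the documented behavior, reusing the `key` and `key_hash` variables from the encryption example earlier in this diff: without the key, the checksum metadata of a customer-encrypted file is withheld.

```ruby
# Without the key: the file loads, but checksum metadata is not returned.
file = bucket.file "avatars/heidi/400x400.png"
file.md5    #=> nil
file.crc32c #=> nil

# With the key and its SHA256 hash: checksums are returned as usual.
file = bucket.file "avatars/heidi/400x400.png",
                   encryption_key: key,
                   encryption_key_sha256: key_hash
file.md5    #=> "..."
```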
```diff
@@ -470,8 +462,8 @@ module Gcloud
 alias_method :find_file, :file
 
 ##
-#
-# and the path to store it with in the bucket.
+# Creates a new {File} object by providing a path to a local file to
+# upload and the path to store it with in the bucket.
 #
 # A `chunk_size` value can be provided in the options to be used
 # in resumable uploads. This value is the number of bytes per
```
```diff
@@ -479,6 +471,43 @@ module Gcloud
 # by 256KB then it will be lowered to the nearest acceptable
 # value.
 #
+# #### Customer-supplied encryption keys
+#
+# By default, Google Cloud Storage manages server-side encryption keys on
+# your behalf. However, a [customer-supplied encryption
+# key](https://cloud.google.com/storage/docs/encryption#customer-supplied)
+# can be provided with the `encryption_key` and `encryption_key_sha256`
+# options. If given, the same key and SHA256 hash also must be provided to
+# subsequently download or copy the file. If you use customer-supplied
+# encryption keys, you must securely manage your keys and ensure that they
+# are not lost. Also, please note that file metadata is not encrypted,
+# with the exception of the CRC32C checksum and MD5 hash. The names of
+# files and buckets are also not encrypted, and you can read or update the
+# metadata of an encrypted file without providing the encryption key.
+#
+# #### Troubleshooting large uploads
+#
+# You may encounter errors while attempting to upload large files. Below
+# are a couple of common cases and their solutions.
+#
+# ##### Handling memory errors
+#
+# If you encounter a memory error such as `NoMemoryError`, try performing
+# a resumable upload and setting the `chunk_size` option to a value that
+# works for your environment, as explained in the final example above.
+#
+# ##### Handling broken pipe errors
+#
+# To avoid broken pipe (`Errno::EPIPE`) errors when uploading, add the
+# [httpclient](https://rubygems.org/gems/httpclient) gem to your project,
+# and the configuration shown below. These lines must execute after you
+# require gcloud but before you make your first gcloud connection. The
+# first statement configures [Faraday](https://rubygems.org/gems/faraday)
+# to use httpclient. The second statement, which should only be added if
+# you are using a version of Faraday at or above 0.9.2, is a workaround
+# for [this gzip
+# issue](https://github.com/GoogleCloudPlatform/gcloud-ruby/issues/367).
+#
 # @param [String] file Path of the file on the filesystem to upload.
 # @param [String] path Path to store the file in Google Cloud Storage.
 # @param [String] acl A predefined set of access controls to apply to this
```
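The broken-pipe guidance above refers to a two-statement Faraday configuration whose `@example` appears further down in this file but is truncated in this excerpt. As a hedged sketch of the kind of setup described (the exact middleware registration is an assumption based on the prose and the linked issue, not copied from the gem):

```ruby
require "gcloud"

# Statement 1: use httpclient as Faraday's HTTP adapter.
Faraday.default_adapter = :httpclient

# Statement 2 (only for Faraday >= 0.9.2): re-register the gzip response
# middleware as the no-op base middleware, per the linked gzip issue.
Faraday::Response.register_middleware gzip: Faraday::Response::Middleware
```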
```diff
@@ -532,6 +561,12 @@ module Gcloud
 # @param [Hash] metadata A hash of custom, user-provided web-safe keys and
 #   arbitrary string values that will returned with requests for the file
 #   as "x-goog-meta-" response headers.
+# @param [String] encryption_key Optional. A customer-supplied, AES-256
+#   encryption key that will be used to encrypt the file. Must be provided
+#   if `encryption_key_sha256` is provided.
+# @param [String] encryption_key_sha256 Optional. The SHA256 hash of the
+#   customer-supplied, AES-256 encryption key that will be used to encrypt
+#   the file. Must be provided if `encryption_key` is provided.
 #
 # @return [Gcloud::Storage::File]
 #
```
```diff
@@ -545,7 +580,7 @@ module Gcloud
 #
 #   bucket.create_file "path/to/local.file.ext"
 #
-# @example
+# @example Specifying a destination path:
 #   require "gcloud"
 #
 #   gcloud = Gcloud.new
```
```diff
@@ -556,7 +591,7 @@ module Gcloud
 #   bucket.create_file "path/to/local.file.ext",
 #                      "destination/path/file.ext"
 #
-# @example
+# @example Specifying the chunk size as a number of bytes:
 #   require "gcloud"
 #
 #   gcloud = Gcloud.new
```
```diff
@@ -568,30 +603,31 @@ module Gcloud
 #                      "destination/path/file.ext",
 #                      chunk_size: 1024*1024 # 1 MB chunk
 #
-#
+# @example Providing a customer-supplied encryption key:
+#   require "gcloud"
+#   require "digest/sha2"
 #
-#
-#
+#   gcloud = Gcloud.new
+#   storage = gcloud.storage
+#   bucket = storage.bucket "my-bucket"
 #
-#
+#   # Key generation shown for example purposes only. Write your own.
+#   cipher = OpenSSL::Cipher.new "aes-256-cfb"
+#   cipher.encrypt
+#   key = cipher.random_key
+#   key_hash = Digest::SHA256.digest key
 #
-#
-#
-#
-#
-# ##### Handling broken pipe errors
+#   bucket.create_file "path/to/local.file.ext",
+#                      "destination/path/file.ext",
+#                      encryption_key: key,
+#                      encryption_key_sha256: key_hash
 #
-#
-#
-#
-#
-# first statement configures [Faraday](https://rubygems.org/gems/faraday)
-# to use httpclient. The second statement, which should only be added if
-# you are using a version of Faraday at or above 0.9.2, is a workaround
-# for [this gzip
-# issue](https://github.com/GoogleCloudPlatform/gcloud-ruby/issues/367).
+#   # Store your key and hash securely for later use.
+#   file = bucket.file "destination/path/file.ext",
+#                      encryption_key: key,
+#                      encryption_key_sha256: key_hash
 #
-# @example
+# @example Avoiding broken pipe errors with large uploads:
 #   require "gcloud"
 #
 #   # Use httpclient to avoid broken pipe errors with large uploads
```
```diff
@@ -608,13 +644,16 @@ module Gcloud
 def create_file file, path = nil, acl: nil, cache_control: nil,
                 content_disposition: nil, content_encoding: nil,
                 content_language: nil, content_type: nil, chunk_size: nil,
-                crc32c: nil, md5: nil, metadata: nil
+                crc32c: nil, md5: nil, metadata: nil, encryption_key: nil,
+                encryption_key_sha256: nil
   ensure_connection!
   options = { acl: File::Acl.predefined_rule_for(acl), md5: md5,
               cache_control: cache_control, content_type: content_type,
               content_disposition: content_disposition, crc32c: crc32c,
               content_encoding: content_encoding, chunk_size: chunk_size,
-              content_language: content_language, metadata: metadata }
+              content_language: content_language, metadata: metadata,
+              encryption_key: encryption_key,
+              encryption_key_sha256: encryption_key_sha256 }
   ensure_file_exists! file
   resumable = resumable_upload?(file)
   resp = @connection.upload_file resumable, name, file, path, options
```
data/lib/gcloud/storage/bucket/list.rb
CHANGED
```diff
@@ -24,23 +24,147 @@ module Gcloud
 ##
 # If not empty, indicates that there are more buckets
 # that match the request and this value should be passed to
-# the next Gcloud::Storage::Project#buckets to continue.
+# the next {Gcloud::Storage::Project#buckets} to continue.
 attr_accessor :token
 
 ##
-# Create a new Bucket::List with an array of values.
-def initialize arr = []
+# @private Create a new Bucket::List with an array of values.
+def initialize arr = []
   super arr
-
+end
+
+##
+# Whether there is a next page of buckets.
+#
+# @return [Boolean]
+#
+# @example
+#   require "gcloud"
+#
+#   gcloud = Gcloud.new
+#   storage = gcloud.storage
+#
+#   buckets = storage.buckets
+#   if buckets.next?
+#     next_buckets = buckets.next
+#   end
+#
+def next?
+  !token.nil?
+end
+
+##
+# Retrieve the next page of buckets.
+#
+# @return [Bucket::List]
+#
+# @example
+#   require "gcloud"
+#
+#   gcloud = Gcloud.new
+#   storage = gcloud.storage
+#
+#   buckets = storage.buckets
+#   if buckets.next?
+#     next_buckets = buckets.next
+#   end
+#
+def next
+  return nil unless next?
+  ensure_connection!
+  options = { prefix: @prefix, token: @token, max: @max }
+  resp = @connection.list_buckets options
+  fail ApiError.from_response(resp) unless resp.success?
+  Bucket::List.from_response resp, @connection, @prefix, @max
+end
+
+##
+# Retrieves all buckets by repeatedly loading {#next} until {#next?}
+# returns `false`. Calls the given block once for each bucket, which is
+# passed as the parameter.
+#
+# An Enumerator is returned if no block is given.
+#
+# This method may make several API calls until all buckets are
+# retrieved. Be sure to use as narrow a search criteria as possible.
+# Please use with caution.
+#
+# @param [Integer] request_limit The upper limit of API requests to make
+#   to load all buckets. Default is no limit.
+# @yield [bucket] The block for accessing each bucket.
+# @yieldparam [Bucket] bucket The bucket object.
+#
+# @return [Enumerator]
+#
+# @example Iterating each bucket by passing a block:
+#   require "gcloud"
+#
+#   gcloud = Gcloud.new
+#   storage = gcloud.storage
+#
+#   buckets = storage.buckets
+#   buckets.all do |bucket|
+#     puts bucket.name
+#   end
+#
+# @example Using the enumerator by not passing a block:
+#   require "gcloud"
+#
+#   gcloud = Gcloud.new
+#   storage = gcloud.storage
+#
+#   buckets = storage.buckets
+#   all_names = buckets.all.map do |bucket|
+#     bucket.name
+#   end
+#
+# @example Limit the number of API calls made:
+#   require "gcloud"
+#
+#   gcloud = Gcloud.new
+#   storage = gcloud.storage
+#
+#   buckets = storage.buckets
+#   buckets.all(request_limit: 10) do |bucket|
+#     puts bucket.name
+#   end
+#
+def all request_limit: nil
+  request_limit = request_limit.to_i if request_limit
+  unless block_given?
+    return enum_for(:all, request_limit: request_limit)
+  end
+  results = self
+  loop do
+    results.each { |r| yield r }
+    if request_limit
+      request_limit -= 1
+      break if request_limit < 0
+    end
+    break unless results.next?
+    results = results.next
+  end
 end
 
 ##
 # @private New Bucket::List from a response object.
-def self.from_response resp, conn
-  buckets = Array(resp.data["items"]).map do |gapi_object|
+def self.from_response resp, conn, prefix = nil, max = nil
+  buckets = new(Array(resp.data["items"]).map do |gapi_object|
     Bucket.from_gapi gapi_object, conn
-  end
-
+  end)
+  buckets.instance_variable_set "@token", resp.data["nextPageToken"]
+  buckets.instance_variable_set "@connection", conn
+  buckets.instance_variable_set "@prefix", prefix
+  buckets.instance_variable_set "@max", max
+  buckets
+end
+
+protected
+
+##
+# Raise an error unless an active connection is available.
+def ensure_connection!
+  fail "Must have active connection" unless @connection
 end
 end
 end
```
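Taken together, the new `Bucket::List` surface supports both styles of pagination shown in the added documentation:

```ruby
require "gcloud"

gcloud = Gcloud.new
storage = gcloud.storage

# Let the list follow page tokens (all returns an Enumerator without a block).
names = storage.buckets.all.map(&:name)

# Or walk the pages explicitly with next?/next.
buckets = storage.buckets
while buckets
  buckets.each { |bucket| puts bucket.name }
  buckets = buckets.next? ? buckets.next : nil
end
```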