gcloud 0.10.0 → 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +8 -8
- data/CHANGELOG.md +36 -0
- data/lib/gcloud/backoff.rb +5 -5
- data/lib/gcloud/bigquery.rb +24 -0
- data/lib/gcloud/bigquery/connection.rb +32 -25
- data/lib/gcloud/bigquery/data.rb +99 -1
- data/lib/gcloud/bigquery/dataset.rb +5 -13
- data/lib/gcloud/bigquery/dataset/list.rb +124 -2
- data/lib/gcloud/bigquery/job/list.rb +125 -2
- data/lib/gcloud/bigquery/project.rb +30 -27
- data/lib/gcloud/bigquery/query_data.rb +102 -1
- data/lib/gcloud/bigquery/table.rb +17 -2
- data/lib/gcloud/bigquery/table/list.rb +132 -3
- data/lib/gcloud/datastore.rb +30 -19
- data/lib/gcloud/datastore/dataset.rb +2 -22
- data/lib/gcloud/datastore/dataset/lookup_results.rb +160 -4
- data/lib/gcloud/datastore/dataset/query_results.rb +229 -23
- data/lib/gcloud/datastore/transaction.rb +2 -5
- data/lib/gcloud/dns.rb +20 -0
- data/lib/gcloud/dns/change/list.rb +109 -6
- data/lib/gcloud/dns/connection.rb +18 -9
- data/lib/gcloud/dns/project.rb +4 -8
- data/lib/gcloud/dns/record/list.rb +96 -13
- data/lib/gcloud/dns/zone.rb +9 -24
- data/lib/gcloud/dns/zone/list.rb +102 -5
- data/lib/gcloud/dns/zone/transaction.rb +1 -1
- data/lib/gcloud/logging.rb +19 -0
- data/lib/gcloud/logging/entry/list.rb +83 -14
- data/lib/gcloud/logging/metric/list.rb +89 -12
- data/lib/gcloud/logging/project.rb +18 -30
- data/lib/gcloud/logging/resource_descriptor/list.rb +105 -6
- data/lib/gcloud/logging/sink/list.rb +89 -12
- data/lib/gcloud/pubsub.rb +23 -0
- data/lib/gcloud/pubsub/project.rb +21 -29
- data/lib/gcloud/pubsub/service.rb +1 -3
- data/lib/gcloud/pubsub/subscription/list.rb +167 -13
- data/lib/gcloud/pubsub/topic.rb +15 -13
- data/lib/gcloud/pubsub/topic/batch.rb +10 -4
- data/lib/gcloud/pubsub/topic/list.rb +134 -8
- data/lib/gcloud/resource_manager.rb +24 -0
- data/lib/gcloud/resource_manager/connection.rb +18 -9
- data/lib/gcloud/resource_manager/manager.rb +7 -4
- data/lib/gcloud/resource_manager/project/list.rb +93 -14
- data/lib/gcloud/storage.rb +63 -0
- data/lib/gcloud/storage/bucket.rb +100 -61
- data/lib/gcloud/storage/bucket/list.rb +132 -8
- data/lib/gcloud/storage/connection.rb +68 -44
- data/lib/gcloud/storage/errors.rb +9 -3
- data/lib/gcloud/storage/file.rb +48 -4
- data/lib/gcloud/storage/file/list.rb +151 -15
- data/lib/gcloud/storage/file/verifier.rb +3 -3
- data/lib/gcloud/storage/project.rb +15 -30
- data/lib/gcloud/translate.rb +20 -0
- data/lib/gcloud/translate/connection.rb +12 -3
- data/lib/gcloud/version.rb +1 -1
- data/lib/gcloud/vision.rb +20 -0
- data/lib/gcloud/vision/connection.rb +10 -1
- data/lib/gcloud/vision/image.rb +15 -18
- metadata +16 -2
checksums.yaml
CHANGED

```diff
@@ -1,15 +1,15 @@
 ---
 !binary "U0hBMQ==":
   metadata.gz: !binary |-
-
+    MTIzYjc5OWY0M2E1MGJkMTVlNWI5NmM4YTdkZDM3ZWRmZTgyZDRiZQ==
   data.tar.gz: !binary |-
-
+    Nzg1ZTExY2RhMDRlMDUzMzkyY2FkYTI4ODUyMDc3NTEyMjIzM2QyMw==
 SHA512:
   metadata.gz: !binary |-
-
-
-
+    YmFjZmQyMjU4NjRhZGM0Zjc4NDYxYmVjYTQyNzhhZGI1ZTAyZjBhYTBjMDZm
+    N2U5OWZkOTYzZDk1NjEyNzk0MjJiMTU1OTVlYjZiN2Q1MjhjMTMwYjg2M2U3
+    OWI2NWYxMjlmZGE2NDM2MDZjOWExYzIyNDhiMjEzOTE4ZWRjMjY=
   data.tar.gz: !binary |-
-
-
-
+    ZjgzNWI3NjA3ZWFkNmJjZWUxYWM1M2M3MGIxMzdmOTcxMTdlZjIzZjRiOGU3
+    Yzk1MWQ0ZDQzYjg0N2Y1MDI2MzhmM2VlNTA0NWY2NWYyMDAzMjU3YmE5YzM3
+    ZWI3MTkyMDU2M2YyYzMyYWQ0MTg0YWMwYzc4NWNlNTBmMThlNzU=
```
data/CHANGELOG.md
CHANGED

```diff
@@ -1,5 +1,41 @@
 # Release History
 
+### 0.11.0 / 2016-06-13
+
+#### Changes
+
+* Add backoff to all requests (dolzenko)
+* BigQuery
+  * Add `#all` to `Data`
+  * Add `#all`, `#next` and `#next?` to `Dataset::List`, `Job::List`,
+    `QueryData`, `Table::List` and `Dataset::LookupResults`
+  * `#all` methods now return lazy enumerator with `request_limit`
+* Datastore
+  * Add `#cursor_for`, `#each_with_cursor` and `#all_with_cursor` to
+    `Dataset::QueryResults`
+  * `#all` and `#all_with_cursor` methods now return lazy enumerator with
+    `request_limit`
+* DNS
+  * Add `#all` to `Change::List` and `Zone::List`
+  * `#all` methods now return lazy enumerator with `request_limit`
+* Logging
+  * `#all` methods now return lazy enumerator with `request_limit`
+* Pub/Sub
+  * Fix bug when publishing multi-byte strings
+  * Add support for IO-ish objects
+  * Add `#all`, `#next` and `#next?` to `Subscription::List` and `Topic::List`
+  * `#all` methods now return lazy enumerator with `request_limit`
+* Resource Manager
+  * `#all` methods now return lazy enumerator with `request_limit`
+* Storage
+  * Breaking Change: Remove `retries` option from `Bucket#delete` and
+    `Project#create_bucket` (configure in `Backoff` instead)
+  * Add support for customer-supplied encryption keys
+  * Fix issue verifying large files (Aguasvivas22)
+  * `#all` methods now return lazy enumerator with `request_limit`
+* Vision
+  * Add support for IO-ish objects
+
 ### 0.10.0 / 2016-05-19
 
 #### Major Changes
```
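The `#all` change repeated throughout this changelog has the same shape in every service: without a block it returns an enumerator, and `request_limit` caps the number of paged API calls it may issue. A minimal sketch of the new surface, assuming a BigQuery dataset named `my_dataset` exists (the name is illustrative):

```ruby
require "gcloud"

gcloud   = Gcloud.new
bigquery = gcloud.bigquery
dataset  = bigquery.dataset "my_dataset" # illustrative dataset name

# With no block, #all returns an Enumerator that pages lazily.
table_ids = dataset.tables.all.map(&:table_id)

# request_limit caps how many additional page requests may be made.
dataset.tables.all(request_limit: 2) do |table|
  puts table.table_id
end
```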
data/lib/gcloud/backoff.rb
CHANGED

```diff
@@ -33,26 +33,26 @@ module Gcloud
       ##
       # The number of times a retriable API call should be retried.
       #
-      # The default value is 3
+      # The default value is `3`.
       attr_accessor :retries
 
       ##
       # The GRPC Status Codes that should be retried.
       #
-      # The default values are 14
+      # The default values are `14`.
       attr_accessor :grpc_codes
 
       ##
       # The HTTP Status Codes that should be retried.
       #
-      # The default values are 500 and 503
+      # The default values are `500` and `503`.
       attr_accessor :http_codes
 
       ##
       # The Google API error reasons that should be retried.
       #
-      # The default values are rateLimitExceeded and
-      # userRateLimitExceeded
+      # The default values are `rateLimitExceeded` and
+      # `userRateLimitExceeded`.
       attr_accessor :reasons
 
       ##
```
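The accessors above are class-level settings (as the BigQuery documentation added in this release shows with `Gcloud::Backoff.retries = 5`), so retry policy is configured once per process. A hedged sketch of overriding them; the values shown are illustrative, not recommendations:

```ruby
require "gcloud/backoff"

# Each accessor documented above is global to all gcloud requests.
Gcloud::Backoff.retries    = 5               # default is 3
Gcloud::Backoff.http_codes = [500, 502, 503] # defaults are 500 and 503
Gcloud::Backoff.reasons    = ["rateLimitExceeded",
                              "userRateLimitExceeded"] # same as the defaults
```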
data/lib/gcloud/bigquery.rb
CHANGED

```diff
@@ -386,6 +386,30 @@ module Gcloud
   # BigQuery](https://cloud.google.com/bigquery/exporting-data-from-bigquery)
   # for details.
   #
+  # ## Configuring Backoff
+  #
+  # The {Gcloud::Backoff} class allows users to globally configure how Cloud API
+  # requests are automatically retried in the case of some errors, such as a
+  # `500` or `503` status code, or a specific internal error code such as
+  # `rateLimitExceeded`.
+  #
+  # If an API call fails, the response will be inspected to see if the call
+  # should be retried. If the response matches the criteria, then the request
+  # will be retried after a delay. If another error occurs, the delay will be
+  # increased incrementally before a subsequent attempt. The first retry will be
+  # delayed one second, the second retry two seconds, and so on.
+  #
+  # ```ruby
+  # require "gcloud"
+  # require "gcloud/backoff"
+  #
+  # Gcloud::Backoff.retries = 5 # Raise the maximum number of retries from 3
+  # ```
+  #
+  # See the [BigQuery error
+  # table](https://cloud.google.com/bigquery/troubleshooting-errors#errortable)
+  # for a list of error conditions.
+  #
   module Bigquery
   end
 end
```
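The delay schedule described in this new documentation grows linearly with the attempt count. A standalone sketch of that policy, for illustration only (this is not the library's internal implementation):

```ruby
# Illustrative linear backoff: attempt 1 sleeps 1s, attempt 2 sleeps 2s, ...
def with_linear_backoff retries: 3
  attempts = 0
  begin
    yield
  rescue StandardError
    attempts += 1
    raise if attempts > retries
    sleep attempts
    retry
  end
end

# Simulate two transient failures before succeeding.
attempt_log = []
with_linear_backoff(retries: 5) do
  attempt_log << Time.now
  raise "flaky" if attempt_log.size < 3
  "ok"
end
```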
data/lib/gcloud/bigquery/connection.rb
CHANGED

```diff
@@ -15,6 +15,7 @@
 
 require "pathname"
 require "gcloud/version"
+require "gcloud/backoff"
 require "google/api_client"
 require "digest/md5"
 
@@ -50,7 +51,7 @@ module Gcloud
                    maxResults: options.delete(:max)
                  }.delete_if { |_, v| v.nil? }
 
-        @client.execute(
+        execute(
           api_method: @bigquery.datasets.list,
           parameters: params
         )
@@ -59,7 +60,7 @@ module Gcloud
       ##
       # Returns the dataset specified by datasetID.
       def get_dataset dataset_id
-        @client.execute(
+        execute(
           api_method: @bigquery.datasets.get,
           parameters: { projectId: @project, datasetId: dataset_id }
         )
@@ -68,7 +69,7 @@ module Gcloud
       ##
       # Creates a new empty dataset.
       def insert_dataset dataset_id, options = {}
-        @client.execute(
+        execute(
          api_method: @bigquery.datasets.insert,
          parameters: { projectId: @project },
          body_object: insert_dataset_request(dataset_id, options)
@@ -81,7 +82,7 @@ module Gcloud
       def patch_dataset dataset_id, options = {}
         project_id = options[:project_id] || @project
 
-        @client.execute(
+        execute(
           api_method: @bigquery.datasets.patch,
           parameters: { projectId: project_id, datasetId: dataset_id },
           body_object: patch_dataset_request(options)
@@ -95,7 +96,7 @@ module Gcloud
       # Immediately after deletion, you can create another dataset with
       # the same name.
       def delete_dataset dataset_id, force = nil
-        @client.execute(
+        execute(
           api_method: @bigquery.datasets.delete,
           parameters: { projectId: @project, datasetId: dataset_id,
                         deleteContents: force
@@ -113,14 +114,14 @@ module Gcloud
                    maxResults: options.delete(:max)
                  }.delete_if { |_, v| v.nil? }
 
-        @client.execute(
+        execute(
           api_method: @bigquery.tables.list,
           parameters: params
         )
       end
 
       def get_project_table project_id, dataset_id, table_id
-        @client.execute(
+        execute(
           api_method: @bigquery.tables.get,
           parameters: { projectId: project_id, datasetId: dataset_id,
                         tableId: table_id }
@@ -139,7 +140,7 @@ module Gcloud
       ##
       # Creates a new, empty table in the dataset.
       def insert_table dataset_id, table_id, options = {}
-        @client.execute(
+        execute(
           api_method: @bigquery.tables.insert,
           parameters: { projectId: @project, datasetId: dataset_id },
           body_object: insert_table_request(dataset_id, table_id, options)
@@ -150,7 +151,7 @@ module Gcloud
       # Updates information in an existing table, replacing fields that
       # are provided in the submitted table resource.
       def patch_table dataset_id, table_id, options = {}
-        @client.execute(
+        execute(
           api_method: @bigquery.tables.patch,
           parameters: { projectId: @project, datasetId: dataset_id,
                         tableId: table_id },
@@ -162,7 +163,7 @@ module Gcloud
       # Deletes the table specified by tableId from the dataset.
       # If the table contains data, all the data will be deleted.
       def delete_table dataset_id, table_id
-        @client.execute(
+        execute(
           api_method: @bigquery.tables.delete,
           parameters: { projectId: @project, datasetId: dataset_id,
                         tableId: table_id }
@@ -179,14 +180,14 @@ module Gcloud
                    startIndex: options.delete(:start)
                  }.delete_if { |_, v| v.nil? }
 
-        @client.execute(
+        execute(
           api_method: @bigquery.tabledata.list,
           parameters: params
         )
       end
 
       def insert_tabledata dataset_id, table_id, rows, options = {}
-        @client.execute(
+        execute(
           api_method: @bigquery.tabledata.insert_all,
           parameters: { projectId: @project,
                         datasetId: dataset_id,
@@ -199,7 +200,7 @@ module Gcloud
       # Lists all jobs in the specified project to which you have
       # been granted the READER job role.
       def list_jobs options = {}
-        @client.execute(
+        execute(
           api_method: @bigquery.jobs.list,
           parameters: list_jobs_params(options)
         )
@@ -208,14 +209,14 @@ module Gcloud
       ##
       # Returns the job specified by jobID.
       def get_job job_id
-        @client.execute(
+        execute(
           api_method: @bigquery.jobs.get,
           parameters: { projectId: @project, jobId: job_id }
         )
       end
 
       def insert_job config
-        @client.execute(
+        execute(
           api_method: @bigquery.jobs.insert,
           parameters: { projectId: @project },
           body_object: { "configuration" => config }
@@ -223,7 +224,7 @@ module Gcloud
       end
 
       def query_job query, options = {}
-        @client.execute(
+        execute(
           api_method: @bigquery.jobs.insert,
           parameters: { projectId: @project },
           body_object: query_table_config(query, options)
@@ -231,7 +232,7 @@ module Gcloud
       end
 
       def query query, options = {}
-        @client.execute(
+        execute(
           api_method: @bigquery.jobs.query,
           parameters: { projectId: @project },
           body_object: query_config(query, options)
@@ -248,14 +249,14 @@ module Gcloud
                    timeoutMs: options.delete(:timeout)
                  }.delete_if { |_, v| v.nil? }
 
-        @client.execute(
+        execute(
           api_method: @bigquery.jobs.get_query_results,
           parameters: params
         )
       end
 
       def copy_table source, target, options = {}
-        @client.execute(
+        execute(
           api_method: @bigquery.jobs.insert,
           parameters: { projectId: @project },
           body_object: copy_table_config(source, target, options)
@@ -263,7 +264,7 @@ module Gcloud
       end
 
       def link_table table, urls, options = {}
-        @client.execute(
+        execute(
           api_method: @bigquery.jobs.insert,
           parameters: { projectId: @project },
           body_object: link_table_config(table, urls, options)
@@ -271,7 +272,7 @@ module Gcloud
       end
 
       def extract_table table, storage_files, options = {}
-        @client.execute(
+        execute(
           api_method: @bigquery.jobs.insert,
           parameters: { projectId: @project },
           body_object: extract_table_config(table, storage_files, options)
@@ -279,7 +280,7 @@ module Gcloud
       end
 
       def load_table table, storage_url, options = {}
-        @client.execute(
+        execute(
           api_method: @bigquery.jobs.insert,
           parameters: { projectId: @project },
           body_object: load_table_config(table, storage_url,
@@ -290,7 +291,7 @@ module Gcloud
       def load_multipart table, file, options = {}
         media = load_media file
 
-        @client.execute(
+        execute(
           api_method: @bigquery.jobs.insert,
           media: media,
           parameters: { projectId: @project, uploadType: "multipart" },
@@ -301,14 +302,14 @@ module Gcloud
       def load_resumable table, file, chunk_size = nil, options = {}
         media = load_media file, chunk_size
 
-        result = @client.execute(
+        result = execute(
           api_method: @bigquery.jobs.insert,
           media: media,
           parameters: { projectId: @project, uploadType: "resumable" },
           body_object: load_table_config(table, nil, file, options)
         )
         upload = result.resumable_upload
-        result = @client.execute upload while upload.resumable?
+        result = execute upload while upload.resumable?
         result
       end
 
@@ -612,6 +613,12 @@ module Gcloud
         media.chunk_size = chunk_size unless chunk_size.nil?
         media
       end
+
+      def execute options
+        Gcloud::Backoff.new.execute_gapi do
+          @client.execute options
+        end
+      end
     end
   end
 end
```
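The private `execute` helper added at the bottom is the heart of this release: every `Connection` method now funnels through a single choke point, so retry behavior is defined in one place. A simplified sketch of what such a wrapper looks like; this is an illustration, not the actual `Gcloud::Backoff#execute_gapi` body:

```ruby
# Illustration only: a backoff wrapper in the style of Gcloud::Backoff.
class IllustrativeBackoff
  def initialize retries: 3, http_codes: [500, 503]
    @retries    = retries
    @http_codes = http_codes
  end

  # Runs the block, retrying with a linearly growing delay while the
  # result carries a retriable HTTP status and retries remain.
  def execute_gapi
    attempts = 0
    loop do
      result = yield
      return result unless retriable?(result) && attempts < @retries
      attempts += 1
      sleep attempts # 1s, then 2s, then 3s, ...
    end
  end

  private

  # Assumed response shape: anything responding to #status.
  def retriable? result
    result.respond_to?(:status) && @http_codes.include?(result.status)
  end
end
```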
data/lib/gcloud/bigquery/data.rb
CHANGED

```diff
@@ -61,17 +61,115 @@ module Gcloud
       end
 
       ##
-      #
+      # Whether there is a next page of data.
+      #
+      # @return [Boolean]
+      #
+      # @example
+      #   require "gcloud"
+      #
+      #   gcloud = Gcloud.new
+      #   bigquery = gcloud.bigquery
+      #   table = dataset.table "my_table"
+      #
+      #   data = table.data
+      #   if data.next?
+      #     next_data = data.next
+      #   end
+      #
       def next?
         !token.nil?
       end
 
+      ##
+      # Retrieve the next page of data.
+      #
+      # @return [Data]
+      #
+      # @example
+      #   require "gcloud"
+      #
+      #   gcloud = Gcloud.new
+      #   bigquery = gcloud.bigquery
+      #   table = dataset.table "my_table"
+      #
+      #   data = table.data
+      #   if data.next?
+      #     next_data = data.next
+      #   end
+      #
       def next
         return nil unless next?
         ensure_table!
         table.data token: token
       end
 
+      ##
+      # Retrieves all rows by repeatedly loading {#next} until {#next?}
+      # returns `false`. Calls the given block once for each row, which is
+      # passed as the parameter.
+      #
+      # An Enumerator is returned if no block is given.
+      #
+      # This method may make several API calls until all rows are retrieved.
+      # Be sure to use as narrow a search criteria as possible. Please use
+      # with caution.
+      #
+      # @param [Integer] request_limit The upper limit of API requests to
+      #   make to load all data. Default is no limit.
+      # @yield [row] The block for accessing each row of data.
+      # @yieldparam [Hash] row The row object.
+      #
+      # @return [Enumerator]
+      #
+      # @example Iterating each rows by passing a block:
+      #   require "gcloud"
+      #
+      #   gcloud = Gcloud.new
+      #   bigquery = gcloud.bigquery
+      #   table = dataset.table "my_table"
+      #
+      #   table.data.all do |row|
+      #     puts row["word"]
+      #   end
+      #
+      # @example Using the enumerator by not passing a block:
+      #   require "gcloud"
+      #
+      #   gcloud = Gcloud.new
+      #   bigquery = gcloud.bigquery
+      #   table = dataset.table "my_table"
+      #
+      #   words = table.data.all.map do |row|
+      #     row["word"]
+      #   end
+      #
+      # @example Limit the number of API calls made:
+      #   require "gcloud"
+      #
+      #   gcloud = Gcloud.new
+      #   bigquery = gcloud.bigquery
+      #   table = dataset.table "my_table"
+      #
+      #   table.data.all(request_limit: 10) do |row|
+      #     puts row["word"]
+      #   end
+      #
+      def all request_limit: nil
+        request_limit = request_limit.to_i if request_limit
+        return enum_for(:all, request_limit: request_limit) unless block_given?
+        results = self
+        loop do
+          results.each { |r| yield r }
+          if request_limit
+            request_limit -= 1
+            break if request_limit < 0
+          end
+          break unless results.next?
+          results = results.next
+        end
+      end
+
       ##
       # Represents Table Data as a list of positional values (array of arrays).
       # No type conversion is made, e.g. numbers are formatted as strings.
```
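One subtlety in the `all` implementation above: the first page is already loaded when `all` is called, and the counter is decremented only after a page has been yielded, so `request_limit` bounds the number of *follow-up* page requests rather than the total page count. Assuming `table` is a `Gcloud::Bigquery::Table` as in the examples above:

```ruby
data = table.data

# Yields only the already-loaded first page; makes no further API calls.
data.all(request_limit: 0) { |row| puts row["word"] }

# Yields the first and second pages; makes at most one further API call.
data.all(request_limit: 1) { |row| puts row["word"] }
```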