gcloud 0.3.0 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +8 -8
- data/CHANGELOG.md +21 -0
- data/lib/gcloud.rb +0 -5
- data/lib/gcloud/bigquery.rb +31 -62
- data/lib/gcloud/bigquery/connection.rb +58 -35
- data/lib/gcloud/bigquery/dataset.rb +147 -18
- data/lib/gcloud/bigquery/dataset/access.rb +477 -0
- data/lib/gcloud/bigquery/dataset/list.rb +1 -1
- data/lib/gcloud/bigquery/errors.rb +2 -0
- data/lib/gcloud/bigquery/job.rb +30 -6
- data/lib/gcloud/bigquery/job/list.rb +1 -1
- data/lib/gcloud/bigquery/project.rb +47 -8
- data/lib/gcloud/bigquery/query_job.rb +1 -5
- data/lib/gcloud/bigquery/table.rb +185 -47
- data/lib/gcloud/bigquery/table/list.rb +1 -1
- data/lib/gcloud/bigquery/table/schema.rb +252 -0
- data/lib/gcloud/bigquery/view.rb +25 -0
- data/lib/gcloud/datastore/connection.rb +4 -0
- data/lib/gcloud/datastore/dataset.rb +5 -2
- data/lib/gcloud/datastore/errors.rb +1 -1
- data/lib/gcloud/datastore/properties.rb +1 -0
- data/lib/gcloud/datastore/proto.rb +3 -0
- data/lib/gcloud/errors.rb +23 -0
- data/lib/gcloud/gce.rb +62 -0
- data/lib/gcloud/pubsub/connection.rb +4 -0
- data/lib/gcloud/pubsub/errors.rb +2 -0
- data/lib/gcloud/pubsub/project.rb +5 -3
- data/lib/gcloud/pubsub/subscription/list.rb +1 -1
- data/lib/gcloud/pubsub/topic.rb +1 -1
- data/lib/gcloud/pubsub/topic/list.rb +1 -1
- data/lib/gcloud/storage.rb +16 -0
- data/lib/gcloud/storage/bucket.rb +31 -1
- data/lib/gcloud/storage/bucket/acl.rb +12 -10
- data/lib/gcloud/storage/bucket/list.rb +1 -1
- data/lib/gcloud/storage/connection.rb +4 -0
- data/lib/gcloud/storage/errors.rb +2 -0
- data/lib/gcloud/storage/file.rb +13 -0
- data/lib/gcloud/storage/file/acl.rb +6 -5
- data/lib/gcloud/storage/file/list.rb +1 -1
- data/lib/gcloud/storage/project.rb +4 -2
- data/lib/gcloud/version.rb +1 -1
- metadata +6 -2
data/lib/gcloud/bigquery/dataset/list.rb
CHANGED
@@ -35,7 +35,7 @@ module Gcloud
 
         ##
         # New Dataset::List from a response object.
-        def self.
+        def self.from_response resp, conn #:nodoc:
           datasets = List.new(Array(resp.data["datasets"]).map do |gapi_object|
             Dataset.from_gapi gapi_object, conn
           end)
data/lib/gcloud/bigquery/job.rb
CHANGED
@@ -44,11 +44,7 @@ module Gcloud
     #   q = "SELECT COUNT(word) as count FROM publicdata:samples.shakespeare"
     #   job = bigquery.query_job q
     #
-    #
-    #     break if job.done?
-    #     sleep 1
-    #     job.refresh!
-    #   end
+    #   job.wait_until_done!
     #
     #   if job.failed?
     #     puts job.error
@@ -219,7 +215,7 @@ module Gcloud
 
       ##
       # Reloads the job with current data from the BigQuery service.
-      def
+      def reload!
        ensure_connection!
        resp = connection.get_job job_id
        if resp.success?
@@ -228,6 +224,34 @@ module Gcloud
          fail ApiError.from_response(resp)
        end
      end
+      alias_method :refresh!, :reload!
+
+      ##
+      # Refreshes the job until the job is +DONE+.
+      # The delay between refreshes will incrementally increase.
+      #
+      # === Example
+      #
+      #   require "gcloud"
+      #
+      #   gcloud = Gcloud.new
+      #   bigquery = gcloud.bigquery
+      #   dataset = bigquery.dataset "my_dataset"
+      #   table = dataset.table "my_table"
+      #
+      #   extract_job = table.extract "gs://my-bucket/file-name.json",
+      #                               format: "json"
+      #   extract_job.wait_until_done!
+      #   extract_job.done? #=> true
+      def wait_until_done!
+        backoff = ->(retries) { sleep 2 * retries + 5 }
+        retries = 0
+        until done?
+          backoff.call retries
+          retries += 1
+          reload!
+        end
+      end
 
       ##
       # New Job from a Google API Client object.
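Taken together, reload! (aliased as refresh!) and wait_until_done! replace the hand-rolled polling loop from the old examples. Since the backoff lambda sleeps 2 * retries + 5 seconds, the waits grow linearly: 5s, 7s, 9s, and so on. A minimal sketch of the new pattern, reusing the query from the docs above:

    require "gcloud"

    gcloud = Gcloud.new
    bigquery = gcloud.bigquery

    # Start an asynchronous query job, then block until it is DONE.
    job = bigquery.query_job "SELECT COUNT(word) as count " \
                             "FROM publicdata:samples.shakespeare"
    job.wait_until_done! # sleeps 5, 7, 9, ... seconds between reload! calls

    puts job.error if job.failed?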
data/lib/gcloud/bigquery/job/list.rb
CHANGED
@@ -38,7 +38,7 @@ module Gcloud
 
         ##
         # New Job::List from a response object.
-        def self.
+        def self.from_response resp, conn #:nodoc:
           jobs = List.new(Array(resp.data["jobs"]).map do |gapi_object|
             Job.from_gapi gapi_object, conn
           end)
data/lib/gcloud/bigquery/project.rb
CHANGED
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+require "gcloud/gce"
 require "gcloud/bigquery/connection"
 require "gcloud/bigquery/credentials"
 require "gcloud/bigquery/errors"
@@ -77,7 +78,8 @@ module Gcloud
       def self.default_project #:nodoc:
         ENV["BIGQUERY_PROJECT"] ||
           ENV["GCLOUD_PROJECT"] ||
-          ENV["GOOGLE_CLOUD_PROJECT"]
+          ENV["GOOGLE_CLOUD_PROJECT"] ||
+          Gcloud::GCE.project_id
       end
 
       ##
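With gcloud/gce required above, default_project now falls through to the Compute Engine metadata service when no project environment variable is set. A sketch of the effect; the off-GCE behavior is an assumption (presumably Gcloud::GCE.project_id returns nil there, so an explicit project is still required):

    require "gcloud"

    # On a Compute Engine VM with BIGQUERY_PROJECT, GCLOUD_PROJECT, and
    # GOOGLE_CLOUD_PROJECT all unset, the project id is discovered from the
    # instance metadata via Gcloud::GCE.project_id.
    gcloud = Gcloud.new
    bigquery = gcloud.bigquery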
@@ -143,11 +145,7 @@ module Gcloud
      #
      #   job = bigquery.query_job "SELECT name FROM [my_proj:my_data.my_table]"
      #
-      #
-      #     break if job.done?
-      #     sleep 1
-      #     job.refresh!
-      #   end
+      #   job.wait_until_done!
      #   if !job.failed?
      #     job.query_results.each do |row|
      #       puts row["name"]
@@ -268,6 +266,10 @@ module Gcloud
        end
      end
 
+      # rubocop:disable Metrics/AbcSize
+      # rubocop:disable Metrics/MethodLength
+      # Disabled rubocop because the level of abstraction is not violated here
+
      ##
      # Creates a new dataset.
      #
@@ -286,6 +288,11 @@ module Gcloud
      # <code>options[:expiration]</code>::
      #   The default lifetime of all tables in the dataset, in milliseconds.
      #   The minimum value is 3600000 milliseconds (one hour). (+Integer+)
+      # <code>options[:access]</code>::
+      #   The access rules for a Dataset using the Google Cloud Datastore API
+      #   data structure of an array of hashes. See {BigQuery Access
+      #   Control}[https://cloud.google.com/bigquery/access-control] for more
+      #   information. (+Array of Hashes+)
      #
      # === Returns
      #
@@ -311,7 +318,36 @@ module Gcloud
      #                                     name: "My Dataset",
      #                                     description: "This is my Dataset"
      #
+      # Access rules can be provided with the +access+ option:
+      #
+      #   require "gcloud"
+      #
+      #   gcloud = Gcloud.new
+      #   bigquery = gcloud.bigquery
+      #
+      #   dataset = bigquery.create_dataset "my_dataset",
+      #     access: [{"role"=>"WRITER", "userByEmail"=>"writers@example.com"}]
+      #
+      # Or access rules can be configured by using the block syntax:
+      # (See Dataset::Access)
+      #
+      #   require "gcloud"
+      #
+      #   gcloud = Gcloud.new
+      #   bigquery = gcloud.bigquery
+      #
+      #   dataset = bigquery.create_dataset "my_dataset" do |access|
+      #     access.add_writer_user "writers@example.com"
+      #   end
+      #
      def create_dataset dataset_id, options = {}
+        if block_given?
+          access_builder = Dataset::Access.new connection.default_access_rules,
+                                               "projectId" => project
+          yield access_builder
+          options[:access] = access_builder.access if access_builder.changed?
+        end
+
        ensure_connection!
        resp = connection.insert_dataset dataset_id, options
        if resp.success?
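Because create_dataset keeps its options hash alongside the new block, metadata and access rules should compose in one call; a sketch assuming they do (add_writer_user is the only Dataset::Access builder method shown in this diff):

    require "gcloud"

    gcloud = Gcloud.new
    bigquery = gcloud.bigquery

    # The block is applied first; options[:access] is only set when the
    # builder reports changes, so an empty block leaves the options untouched.
    dataset = bigquery.create_dataset "my_dataset",
                                      name: "My Dataset" do |access|
      access.add_writer_user "writers@example.com"
    end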
@@ -321,6 +357,9 @@ module Gcloud
        end
      end
 
+      # rubocop:enable Metrics/AbcSize
+      # rubocop:enable Metrics/MethodLength
+
      ##
      # Retrieves the list of datasets belonging to the project.
      #
@@ -387,7 +426,7 @@ module Gcloud
        ensure_connection!
        resp = connection.list_datasets options
        if resp.success?
-          Dataset::List.
+          Dataset::List.from_response resp, connection
        else
          fail ApiError.from_response(resp)
        end
@@ -494,7 +533,7 @@ module Gcloud
        ensure_connection!
        resp = connection.list_jobs options
        if resp.success?
-          Job::List.
+          Job::List.from_response resp, connection
        else
          fail ApiError.from_response(resp)
        end
data/lib/gcloud/bigquery/query_job.rb
CHANGED
@@ -126,11 +126,7 @@ module Gcloud
      #   q = "SELECT word FROM publicdata:samples.shakespeare"
      #   job = bigquery.query_job q
      #
-      #
-      #     break if job.done?
-      #     sleep 1
-      #     job.refresh!
-      #   end
+      #   job.wait_until_done!
      #   data = job.query_results
      #   data.each do |row|
      #     puts row["word"]
data/lib/gcloud/bigquery/table.rb
CHANGED
@@ -16,6 +16,7 @@
 require "gcloud/bigquery/view"
 require "gcloud/bigquery/data"
 require "gcloud/bigquery/table/list"
+require "gcloud/bigquery/table/schema"
 require "gcloud/bigquery/errors"
 require "gcloud/bigquery/insert_response"
 require "gcloud/upload"
@@ -36,46 +37,25 @@ module Gcloud
     #   gcloud = Gcloud.new
     #   bigquery = gcloud.bigquery
     #   dataset = bigquery.dataset "my_dataset"
-    #   table = dataset.create_table "my_table"
     #
-    #
-    #   "
-    #
-    #
-    #
-    #
-    #
-    #   {
-    #     "name" => "cities_lived",
-    #     "type" => "RECORD",
-    #     "mode" => "REPEATED",
-    #     "fields" => [
-    #       {
-    #         "name" => "place",
-    #         "type" => "STRING",
-    #         "mode" => "REQUIRED"
-    #       },
-    #       {
-    #         "name" => "number_of_years",
-    #         "type" => "INTEGER",
-    #         "mode" => "REQUIRED"
-    #       }
-    #     ]
-    #   }
-    #   ]
-    #   }
-    #   table.schema = schema
+    #   table = dataset.create_table "my_table" do |schema|
+    #     schema.string "first_name", mode: :required
+    #     schema.record "cities_lived", mode: :repeated do |nested_schema|
+    #       nested_schema.string "place", mode: :required
+    #       nested_schema.integer "number_of_years", mode: :required
+    #     end
+    #   end
     #
     #   row = {
     #     "first_name" => "Alice",
     #     "cities_lived" => [
     #       {
-    #         "place"
-    #         "number_of_years"
+    #         "place" => "Seattle",
+    #         "number_of_years" => 5
     #       },
     #       {
-    #         "place"
-    #         "number_of_years"
+    #         "place" => "Stockholm",
+    #         "number_of_years" => 6
     #       }
     #     ]
     #   }
@@ -126,6 +106,51 @@ module Gcloud
        @gapi["tableReference"]["projectId"]
      end
 
+      ##
+      # The gapi fragment containing the Project ID, Dataset ID, and Table ID as
+      # a camel-cased hash.
+      def table_ref #:nodoc:
+        table_ref = @gapi["tableReference"]
+        table_ref = table_ref.to_hash if table_ref.respond_to? :to_hash
+        table_ref
+      end
+
+      ##
+      # The combined Project ID, Dataset ID, and Table ID for this table, in the
+      # format specified by the {Query
+      # Reference}[https://cloud.google.com/bigquery/query-reference#from]:
+      # +project_name:datasetId.tableId+. To use this value in queries see
+      # #query_id.
+      #
+      # :category: Attributes
+      #
+      def id
+        @gapi["id"]
+      end
+
+      ##
+      # The value returned by #id, wrapped in square brackets if the Project ID
+      # contains dashes, as specified by the {Query
+      # Reference}[https://cloud.google.com/bigquery/query-reference#from].
+      # Useful in queries.
+      #
+      # === Example
+      #
+      #   require "gcloud"
+      #
+      #   gcloud = Gcloud.new
+      #   bigquery = gcloud.bigquery
+      #   dataset = bigquery.dataset "my_dataset"
+      #   table = dataset.table "my_table"
+      #
+      #   data = bigquery.query "SELECT name FROM #{table.query_id}"
+      #
+      # :category: Attributes
+      #
+      def query_id
+        project_id["-"] ? "[#{id}]" : id
+      end
+
      ##
      # The name of the table.
      #
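The difference between #id and #query_id only shows up when the project id contains a dash, since the query syntax requires the bracketed form then. A sketch with hypothetical values:

    # Hypothetical project "my-project" (contains a dash):
    table.id       #=> "my-project:my_dataset.my_table"
    table.query_id #=> "[my-project:my_dataset.my_table]"

    # Hypothetical project "myproject" (no dash): both methods agree.
    table.id       #=> "myproject:my_dataset.my_table"
    table.query_id #=> "myproject:my_dataset.my_table"

    # Either way, query_id interpolates safely into SQL:
    data = bigquery.query "SELECT name FROM #{table.query_id}"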
@@ -266,20 +291,66 @@ module Gcloud
      end
 
      ##
-      #
+      # Returns the table's schema as hash containing the keys and values
+      # returned by the Google Cloud BigQuery {Rest API
+      # }[https://cloud.google.com/bigquery/docs/reference/v2/tables#resource].
+      # This method can also be used to set, replace, or add to the schema by
+      # passing a block. See Table::Schema for available methods. To set the
+      # schema by passing a hash instead, use #schema=.
+      #
+      # === Parameters
+      #
+      # +options+::
+      #   An optional Hash for controlling additional behavior. (+Hash+)
+      # <code>options[:replace]</code>::
+      #   Whether to replace the existing schema with the new schema. If
+      #   +true+, the fields will replace the existing schema. If
+      #   +false+, the fields will be added to the existing schema. When a table
+      #   already contains data, schema changes must be additive. Thus, the
+      #   default value is +false+. (+Boolean+)
+      #
+      # === Examples
+      #
+      #   require "gcloud"
+      #
+      #   gcloud = Gcloud.new
+      #   bigquery = gcloud.bigquery
+      #   dataset = bigquery.dataset "my_dataset"
+      #   table = dataset.create_table "my_table"
+      #
+      #   table.schema do |schema|
+      #     schema.string "first_name", mode: :required
+      #     schema.record "cities_lived", mode: :repeated do |nested_schema|
+      #       nested_schema.string "place", mode: :required
+      #       nested_schema.integer "number_of_years", mode: :required
+      #     end
+      #   end
      #
      # :category: Attributes
      #
-      def schema
+      def schema options = {}
        ensure_full_data!
-
-
-        s = {}
-        s
+        g = @gapi
+        g = g.to_hash if g.respond_to? :to_hash
+        s = g["schema"] ||= {}
+        return s unless block_given?
+        s = nil if options[:replace]
+        schema_builder = Schema.new s
+        yield schema_builder
+        self.schema = schema_builder.schema if schema_builder.changed?
      end
 
      ##
      # Updates the schema of the table.
+      # To update the schema using a block instead, use #schema.
+      #
+      # === Parameters
+      #
+      # +schema+::
+      #   A hash containing keys and values as specified by the Google Cloud
+      #   BigQuery {Rest API
+      #   }[https://cloud.google.com/bigquery/docs/reference/v2/tables#resource]
+      #   . (+Hash+)
      #
      # === Example
      #
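Since the builder seeds itself with the current schema unless options[:replace] is given, replacing a schema wholesale is one keyword away from the example above. A sketch; the top-level integer call is assumed to behave like the nested one shown in the docs, and recall that replacement is only safe while the table holds no data:

    # Start from a blank builder instead of the existing fields.
    table.schema replace: true do |schema|
      schema.string "first_name", mode: :required
      schema.integer "age", mode: :required
    end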
@@ -385,7 +456,7 @@ module Gcloud
      # === Parameters
      #
      # +destination_table+::
-      #   The destination for the copied data. (+Table+)
+      #   The destination for the copied data. (+Table+ or +String+)
      # +options+::
      #   An optional Hash for controlling additional behavior. (+Hash+)
      # <code>options[:create]</code>::
@@ -409,7 +480,7 @@ module Gcloud
      #
      #   Gcloud::Bigquery::CopyJob
      #
-      # ===
+      # === Examples
      #
      #   require "gcloud"
      #
@@ -421,11 +492,28 @@ module Gcloud
      #
      #   copy_job = table.copy destination_table
      #
+      # The destination table argument can also be a string identifier as
+      # specified by the {Query
+      # Reference}[https://cloud.google.com/bigquery/query-reference#from]:
+      # +project_name:datasetId.tableId+. This is useful for referencing tables
+      # in other projects and datasets.
+      #
+      #   require "gcloud"
+      #
+      #   gcloud = Gcloud.new
+      #   bigquery = gcloud.bigquery
+      #   dataset = bigquery.dataset "my_dataset"
+      #   table = dataset.table "my_table"
+      #
+      #   copy_job = table.copy "other-project:other_dataset.other_table"
+      #
      # :category: Data
      #
      def copy destination_table, options = {}
        ensure_connection!
-        resp = connection.copy_table
+        resp = connection.copy_table table_ref,
+                                     get_table_ref(destination_table),
+                                     options
        if resp.success?
          Job.from_gapi resp.data, connection
        else
|
|
467
555
|
#
|
468
556
|
def link source_url, options = {} #:nodoc:
|
469
557
|
ensure_connection!
|
470
|
-
resp = connection.link_table
|
558
|
+
resp = connection.link_table table_ref, source_url, options
|
471
559
|
if resp.success?
|
472
560
|
Job.from_gapi resp.data, connection
|
473
561
|
else
|
@@ -516,7 +604,7 @@ module Gcloud
      #
      def extract extract_url, options = {}
        ensure_connection!
-        resp = connection.extract_table
+        resp = connection.extract_table table_ref, extract_url, options
        if resp.success?
          Job.from_gapi resp.data, connection
        else
@@ -542,6 +630,7 @@ module Gcloud
      #   * +csv+ - CSV
      #   * +json+ - {Newline-delimited JSON}[http://jsonlines.org/]
      #   * +avro+ - {Avro}[http://avro.apache.org/]
+      #   * +datastore_backup+ - Cloud Datastore backup
      # <code>options[:create]</code>::
      #   Specifies whether the job is allowed to create new tables. (+String+)
      #
@@ -558,6 +647,12 @@ module Gcloud
      #   * +append+ - BigQuery appends the data to the table.
      #   * +empty+ - An error will be returned if the table already contains
      #     data.
+      # <code>options[:projection_fields]</code>::
+      #   If the +format+ option is set to +datastore_backup+, indicates which
+      #   entity properties to load from a Cloud Datastore backup. Property
+      #   names are case sensitive and must be top-level properties. If not set,
+      #   BigQuery loads all properties. If any named property isn't found in
+      #   the Cloud Datastore backup, an invalid error is returned. (+Array+)
      #
      # === Returns
      #
@@ -589,7 +684,7 @@ module Gcloud
      #   file = bucket.file "file-name.csv"
      #   load_job = table.load file
      #
-      # Or, you can upload a
+      # Or, you can upload a file directly.
      # See {Loading Data with a POST Request}[
      # https://cloud.google.com/bigquery/loading-data-post-request#multipart].
      #
@@ -603,6 +698,23 @@ module Gcloud
      #   file = File.open "my_data.csv"
      #   load_job = table.load file
      #
+      # === A note about large direct uploads
+      #
+      # You may encounter a broken pipe error while attempting to upload large
+      # files. To avoid this problem, add
+      # {httpclient}[https://rubygems.org/gems/httpclient] as a dependency to
+      # your project, and configure {Faraday}[https://rubygems.org/gems/faraday]
+      # to use it, after requiring Gcloud, but before initiating your Gcloud
+      # connection.
+      #
+      #   require "gcloud"
+      #
+      #   Faraday.default_adapter = :httpclient
+      #
+      #   gcloud = Gcloud.new
+      #   bigquery = gcloud.bigquery
+      #   dataset = bigquery.dataset "my_dataset"
+      #
      # :category: Data
      #
      def load file, options = {}
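An end-to-end sketch of the workaround, assuming httpclient has already been added to the project's Gemfile; the file name is a placeholder:

    require "gcloud"

    # Swap Faraday's adapter before the first Gcloud connection is made.
    Faraday.default_adapter = :httpclient

    gcloud = Gcloud.new
    bigquery = gcloud.bigquery
    dataset = bigquery.dataset "my_dataset"
    table = dataset.table "my_table"

    # A large direct upload that could otherwise hit a broken pipe error.
    load_job = table.load File.open("my_large_data.csv")
    load_job.wait_until_done!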
@@ -700,6 +812,22 @@ module Gcloud
        end
      end
 
+      ##
+      # Reloads the table with current data from the BigQuery service.
+      #
+      # :category: Lifecycle
+      #
+      def reload!
+        ensure_connection!
+        resp = connection.get_table dataset_id, table_id
+        if resp.success?
+          @gapi = resp.data
+        else
+          fail ApiError.from_response(resp)
+        end
+      end
+      alias_method :refresh!, :reload!
+
      ##
      # New Table from a Google API Client object.
      def self.from_gapi gapi, conn #:nodoc:
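This mirrors Job#reload!, down to the refresh! alias, so stale table metadata can be re-fetched in place; a minimal sketch:

    table = dataset.table "my_table"

    table.reload!  # re-fetches the table resource from the service
    table.refresh! # the same call, via the new alias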
@@ -737,7 +865,7 @@ module Gcloud
        # Convert to storage URL
        file = file.to_gs_url if file.respond_to? :to_gs_url
 
-        resp = connection.load_table
+        resp = connection.load_table table_ref, file, options
        if resp.success?
          Job.from_gapi resp.data, connection
        else
@@ -755,7 +883,7 @@ module Gcloud
 
      def load_resumable file, options = {}
        chunk_size = verify_chunk_size! options[:chunk_size]
-        resp = connection.load_resumable
+        resp = connection.load_resumable table_ref, file, chunk_size, options
        if resp.success?
          Job.from_gapi resp.data, connection
        else
@@ -764,7 +892,7 @@ module Gcloud
      end
 
      def load_multipart file, options = {}
-        resp = connection.load_multipart
+        resp = connection.load_multipart table_ref, file, options
        if resp.success?
          Job.from_gapi resp.data, connection
        else
@@ -822,6 +950,16 @@ module Gcloud
      def data_complete?
        !@gapi["creationTime"].nil?
      end
+
+      private
+
+      def get_table_ref table
+        if table.respond_to? :table_ref
+          table.table_ref
+        else
+          Connection.table_ref_from_s table, table_ref
+        end
+      end
    end
  end
end