gcloud 0.3.0 → 0.3.1

Files changed (42)
  1. checksums.yaml +8 -8
  2. data/CHANGELOG.md +21 -0
  3. data/lib/gcloud.rb +0 -5
  4. data/lib/gcloud/bigquery.rb +31 -62
  5. data/lib/gcloud/bigquery/connection.rb +58 -35
  6. data/lib/gcloud/bigquery/dataset.rb +147 -18
  7. data/lib/gcloud/bigquery/dataset/access.rb +477 -0
  8. data/lib/gcloud/bigquery/dataset/list.rb +1 -1
  9. data/lib/gcloud/bigquery/errors.rb +2 -0
  10. data/lib/gcloud/bigquery/job.rb +30 -6
  11. data/lib/gcloud/bigquery/job/list.rb +1 -1
  12. data/lib/gcloud/bigquery/project.rb +47 -8
  13. data/lib/gcloud/bigquery/query_job.rb +1 -5
  14. data/lib/gcloud/bigquery/table.rb +185 -47
  15. data/lib/gcloud/bigquery/table/list.rb +1 -1
  16. data/lib/gcloud/bigquery/table/schema.rb +252 -0
  17. data/lib/gcloud/bigquery/view.rb +25 -0
  18. data/lib/gcloud/datastore/connection.rb +4 -0
  19. data/lib/gcloud/datastore/dataset.rb +5 -2
  20. data/lib/gcloud/datastore/errors.rb +1 -1
  21. data/lib/gcloud/datastore/properties.rb +1 -0
  22. data/lib/gcloud/datastore/proto.rb +3 -0
  23. data/lib/gcloud/errors.rb +23 -0
  24. data/lib/gcloud/gce.rb +62 -0
  25. data/lib/gcloud/pubsub/connection.rb +4 -0
  26. data/lib/gcloud/pubsub/errors.rb +2 -0
  27. data/lib/gcloud/pubsub/project.rb +5 -3
  28. data/lib/gcloud/pubsub/subscription/list.rb +1 -1
  29. data/lib/gcloud/pubsub/topic.rb +1 -1
  30. data/lib/gcloud/pubsub/topic/list.rb +1 -1
  31. data/lib/gcloud/storage.rb +16 -0
  32. data/lib/gcloud/storage/bucket.rb +31 -1
  33. data/lib/gcloud/storage/bucket/acl.rb +12 -10
  34. data/lib/gcloud/storage/bucket/list.rb +1 -1
  35. data/lib/gcloud/storage/connection.rb +4 -0
  36. data/lib/gcloud/storage/errors.rb +2 -0
  37. data/lib/gcloud/storage/file.rb +13 -0
  38. data/lib/gcloud/storage/file/acl.rb +6 -5
  39. data/lib/gcloud/storage/file/list.rb +1 -1
  40. data/lib/gcloud/storage/project.rb +4 -2
  41. data/lib/gcloud/version.rb +1 -1
  42. metadata +6 -2
checksums.yaml CHANGED
@@ -1,15 +1,15 @@
 ---
 !binary "U0hBMQ==":
   metadata.gz: !binary |-
-    ZDhjZWRiZTI5NTJjY2FhMmNlNGUzNmRjNTYwYWY0MzhiYmFjOWU2ZA==
+    MzczYWVlN2JlNmY4YzI0MTBiNGM4ZjY5YjZlMjkzMDVmZjQ3MGQwNg==
   data.tar.gz: !binary |-
-    MGE5MDc4NzkzOTQ2ZWE3NWE0ZDZhMWI5ZDc2M2M0NGNiOTFkMGFjYg==
+    YmNkZGUyNzljMjY1YjlhMWQyZjliMGNhZGY2OTdhYjJiOTBiZTNhNw==
 SHA512:
   metadata.gz: !binary |-
-    ODkwZjlhNTcxNTU5ZmQ0OWUzODUxNmY4MDQ1NzIyZTEyOWRiZGI0ZDNkMzc2
-    MDZjNTk0YTM2YzE5M2MzYjI3YTUzYjQ0YzI4YjM3YzUwMDQxYzg4NzdhNWI5
-    NDFkNzE1MDk1YmNkMmEyZTc4ZmNjNzM4NDQzNmVmYTY2ZjA2YzY=
+    MzE5YjkzYmJmNmNmODY4NThhMjdlOTM2YmUxOTZmYWE4MDZjYjBhZDVmN2U1
+    NDMxMjY4NjlkZmZiNGJkNmYyNzczM2RhZWY3YmI4ODY5MmU0N2FiNjI5ZDNi
+    NzBhZGJmNDc4NDU2MWNkMmNkMTA5MzE3MjU0N2Y2N2ZiY2YzYmE=
   data.tar.gz: !binary |-
-    Y2FmNTFjMjhjMWJlNWI4NGQ4YmI4ZjQzYmJiYTU0ZGRhNzNmMTZiYjI3YTFk
-    NDI3YjdiNDZlOTE4M2FkOTlhMDViYWM4NmRkOGMzYmJhM2RjMTE0MjA2NjMz
-    NDA3NWM5N2ViMjk3NmE2ZGY1YzdkZDA0YzcwMjIwOGQxMTQ2ZDk=
+    OGU0MTQzYzRiOGU0NzJlZDllOGFmZWMzYTcwZDc3NTgwMTMxZTIxMjAxOTc4
+    MDQ0M2I2YWQyZGIyZmYyMzQ0MThmMTQ2ZjQ2NzJlNTAzMDI1Y2I2MWY3Y2U2
+    NjdjMjJjZGNlN2EyZmFmNDg3MTcwY2JiZmVkYzExYzZiNTZmZTY=
data/CHANGELOG.md CHANGED
@@ -1,5 +1,26 @@
 # Release History
 
+### 0.3.1 / 2015-09-08
+
+#### Changes
+
+* Auto-discovery of project-id on Google Compute Engine
+* Support getting project id from GCE auth compute
+* New dataset access DSL for BigQuery
+* New table schema DSL for BigQuery
+* Add Code of Conduct
+
+#### Minor changes
+
+* Load data to BigQuery from Datastore backup
+* Add `Job#wait_until_complete` convenience method to BigQuery
+* Add String representation of tables in BigQuery
+* Add `refresh!` methods to Storage and BigQuery
+* Support `DATASTORE_DATASET` environment variable
+* Update Storage and BigQuery documentation for possible errors during large file uploads
+* Fix missing Pathname require
+* Truncate object representation in interactive output
+
 ### 0.3.0 / 2015-08-21
 
 #### Major changes
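
For context, a minimal sketch of the headline 0.3.1 change, project-id auto-discovery on Google Compute Engine. This assumes the gem's documented Gcloud.new entry point; the dataset name is hypothetical.

    require "gcloud"

    # On a Compute Engine instance, 0.3.1 can resolve the project id (and
    # credentials) from the instance metadata, so no explicit project or
    # keyfile arguments are needed.
    gcloud   = Gcloud.new
    bigquery = gcloud.bigquery
    dataset  = bigquery.dataset "my_dataset"  # hypothetical dataset name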
data/lib/gcloud.rb CHANGED
@@ -253,9 +253,4 @@ module Gcloud
     require "gcloud/bigquery"
     Gcloud.bigquery @project, @keyfile, options
   end
-
-  ##
-  # Base Gcloud exception class.
-  class Error < StandardError
-  end
 end
data/lib/gcloud/bigquery.rb CHANGED
@@ -171,11 +171,7 @@ module Gcloud
   #          "FROM publicdata:samples.shakespeare"
   #   job = bigquery.query_job sql
   #
-  #   loop do
-  #     break if job.done?
-  #     sleep 1
-  #     job.refresh!
-  #   end
+  #   job.wait_until_done!
   #   if !job.failed?
   #     job.query_results.each do |row|
   #       puts row["word"]
@@ -213,33 +209,13 @@ module Gcloud
   #   bigquery = gcloud.bigquery
   #   dataset = bigquery.dataset "my_dataset"
   #
-  #   schema = {
-  #     "fields" => [
-  #       {
-  #         "name" => "first_name",
-  #         "type" => "STRING",
-  #         "mode" => "REQUIRED"
-  #       },
-  #       {
-  #         "name" => "cities_lived",
-  #         "type" => "RECORD",
-  #         "mode" => "REPEATED",
-  #         "fields" => [
-  #           {
-  #             "name" => "place",
-  #             "type" => "STRING",
-  #             "mode" => "REQUIRED"
-  #           },
-  #           {
-  #             "name" => "number_of_years",
-  #             "type" => "INTEGER",
-  #             "mode" => "REQUIRED"
-  #           }
-  #         ]
-  #       }
-  #     ]
-  #   }
-  #   table = dataset.create_table "people", schema: schema
+  #   table = dataset.create_table "people" do |schema|
+  #     schema.string "first_name", mode: :required
+  #     schema.record "cities_lived", mode: :repeated do |nested_schema|
+  #       nested_schema.string "place", mode: :required
+  #       nested_schema.integer "number_of_years", mode: :required
+  #     end
+  #   end
   #
   # Because of the repeated field in this schema, we cannot use the CSV format
   # to load data into the table.
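
For readers migrating from the hash form removed above, the block builds the equivalent Tables-resource schema; a sketch of the mapping (the correspondence is inferred from the two versions of the same example):

    # schema.string "first_name", mode: :required
    # corresponds to the old hash entry:
    #   { "name" => "first_name", "type" => "STRING", "mode" => "REQUIRED" }
    #
    # schema.record "cities_lived", mode: :repeated do |nested| ... end
    # corresponds to:
    #   { "name" => "cities_lived", "type" => "RECORD", "mode" => "REPEATED",
    #     "fields" => [ ...nested field hashes... ] }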
@@ -310,26 +286,11 @@ module Gcloud
   #   gcloud = Gcloud.new
   #   bigquery = gcloud.bigquery
   #   dataset = bigquery.dataset "my_dataset"
-  #   schema = {
-  #     "fields" => [
-  #       {
-  #         "name" => "name",
-  #         "type" => "STRING",
-  #         "mode" => "REQUIRED"
-  #       },
-  #       {
-  #         "name" => "sex",
-  #         "type" => "STRING",
-  #         "mode" => "REQUIRED"
-  #       },
-  #       {
-  #         "name" => "number",
-  #         "type" => "INTEGER",
-  #         "mode" => "REQUIRED"
-  #       }
-  #     ]
-  #   }
-  #   table = dataset.create_table "baby_names", schema: schema
+  #   table = dataset.create_table "baby_names" do |schema|
+  #     schema.string "name", mode: :required
+  #     schema.string "sex", mode: :required
+  #     schema.integer "number", mode: :required
+  #   end
   #
   #   file = File.open "names/yob2014.txt"
   #   load_job = table.load file, format: "csv"
@@ -340,6 +301,22 @@ module Gcloud
   # default format for load operations, the option is not actually necessary.
   # For JSON saved with a +.txt+ extension, however, it would be.
   #
+  # === A note about large uploads
+  #
+  # You may encounter a broken pipe error while attempting to upload large
+  # files. To avoid this problem, add
+  # {httpclient}[https://rubygems.org/gems/httpclient] as a dependency to your
+  # project, and configure {Faraday}[https://rubygems.org/gems/faraday] to use
+  # it, after requiring Gcloud, but before initiating your Gcloud connection.
+  #
+  #   require "gcloud"
+  #
+  #   Faraday.default_adapter = :httpclient
+  #
+  #   gcloud = Gcloud.new
+  #   bigquery = gcloud.bigquery
+  #   dataset = bigquery.dataset "my_dataset"
+  #
   # == Exporting query results to Google Cloud Storage
   #
   # The example below shows how to pass the +table+ option with a query in order
@@ -362,11 +339,7 @@ module Gcloud
   #          "ORDER BY count DESC"
   #   query_job = dataset.query_job sql, table: result_table
   #
-  #   loop do
-  #     break if query_job.done?
-  #     sleep 1
-  #     query_job.refresh!
-  #   end
+  #   query_job.wait_until_done!
   #
   #   if !query_job.failed?
   #
@@ -377,11 +350,7 @@ module Gcloud
   #
   #     extract_job = result_table.extract extract_url
   #
-  #     loop do
-  #       break if extract_job.done?
-  #       sleep 1
-  #       extract_job.refresh!
-  #     end
+  #     extract_job.wait_until_done!
   #
   #     # Download to local filesystem
   #     bucket.files.first.download "baby-names-sam.csv"
data/lib/gcloud/bigquery/connection.rb CHANGED
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+require "pathname"
 require "gcloud/version"
 require "google/api_client"
 require "digest/md5"
@@ -79,15 +80,11 @@ module Gcloud
     # fields that are provided in the submitted dataset resource.
     def patch_dataset dataset_id, options = {}
       project_id = options[:project_id] || @project
-      body = { friendlyName: options[:name],
-               description: options[:description],
-               defaultTableExpirationMs: options[:default_expiration]
-             }.delete_if { |_, v| v.nil? }
 
       @client.execute(
         api_method: @bigquery.datasets.patch,
         parameters: { projectId: project_id, datasetId: dataset_id },
-        body_object: body
+        body_object: patch_dataset_request(options)
       )
     end
 
@@ -315,6 +312,35 @@ module Gcloud
       result
     end
 
+    def default_access_rules
+      [
+        { "role" => "OWNER", "specialGroup" => "projectOwners" },
+        { "role" => "WRITER", "specialGroup" => "projectWriters" },
+        { "role" => "READER", "specialGroup" => "projectReaders" },
+        { "role" => "OWNER", "userByEmail" => credentials.issuer }
+      ]
+    end
+
+    ##
+    # Extracts at least +tbl+ group, and possibly +dts+ and +prj+ groups,
+    # from strings in the formats: "my_table", "my_dataset.my_table", or
+    # "my-project:my_dataset.my_table". Then merges project_id and
+    # dataset_id from the default table if they are missing.
+    def self.table_ref_from_s str, default_table_ref
+      str = str.to_s
+      m = /\A(((?<prj>\S*):)?(?<dts>\S*)\.)?(?<tbl>\S*)\z/.match str
+      unless m
+        fail ArgumentError, "unable to identify table from #{str.inspect}"
+      end
+      default_table_ref.merge("projectId" => m["prj"],
+                              "datasetId" => m["dts"],
+                              "tableId" => m["tbl"])
+    end
+
+    def inspect #:nodoc:
+      "#{self.class}(#{@project})"
+    end
+
     protected
 
     ##
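
A quick illustration of the string formats the new table_ref_from_s helper accepts; the capture-group values shown are read directly off the regexp above:

    # "my-project:my_dataset.my_table" #=> prj: "my-project", dts: "my_dataset", tbl: "my_table"
    # "my_dataset.my_table"            #=> prj: nil,          dts: "my_dataset", tbl: "my_table"
    # "my_table"                       #=> prj: nil,          dts: nil,          tbl: "my_table"
    #
    # Per the method comment, parts missing from the string are then filled
    # in from default_table_ref.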
@@ -335,11 +361,20 @@ module Gcloud
         "kind" => "bigquery#dataset",
         "datasetReference" => {
           "projectId" => @project,
-          "datasetId" => dataset_id
-        },
+          "datasetId" => dataset_id },
         "friendlyName" => options[:name],
         "description" => options[:description],
-        "defaultTableExpirationMs" => options[:expiration]
+        "defaultTableExpirationMs" => options[:expiration],
+        "access" => options[:access]
+      }.delete_if { |_, v| v.nil? }
+    end
+
+    def patch_dataset_request options = {}
+      {
+        friendlyName: options[:name],
+        description: options[:description],
+        defaultTableExpirationMs: options[:default_expiration],
+        access: options[:access]
       }.delete_if { |_, v| v.nil? }
     end
 
@@ -457,16 +492,8 @@ module Gcloud
       {
         "configuration" => {
           "copy" => {
-            "sourceTable" => {
-              "projectId" => source["tableReference"]["projectId"],
-              "datasetId" => source["tableReference"]["datasetId"],
-              "tableId" => source["tableReference"]["tableId"]
-            }.delete_if { |_, v| v.nil? },
-            "destinationTable" => {
-              "projectId" => target["tableReference"]["projectId"],
-              "datasetId" => target["tableReference"]["datasetId"],
-              "tableId" => target["tableReference"]["tableId"]
-            }.delete_if { |_, v| v.nil? },
+            "sourceTable" => source,
+            "destinationTable" => target,
             "createDisposition" => create_disposition(options[:create]),
             "writeDisposition" => write_disposition(options[:write])
           }.delete_if { |_, v| v.nil? },
@@ -481,11 +508,7 @@ module Gcloud
       "configuration" => {
         "link" => {
           "sourceUri" => Array(urls),
-          "destinationTable" => {
-            "projectId" => table["tableReference"]["projectId"],
-            "datasetId" => table["tableReference"]["datasetId"],
-            "tableId" => table["tableReference"]["tableId"]
-          }.delete_if { |_, v| v.nil? },
+          "destinationTable" => table,
           "createDisposition" => create_disposition(options[:create]),
           "writeDisposition" => write_disposition(options[:write]),
           "sourceFormat" => source_format(path, options[:format])
@@ -504,11 +527,7 @@ module Gcloud
       "configuration" => {
         "extract" => {
           "destinationUris" => Array(storage_urls),
-          "sourceTable" => {
-            "projectId" => table["tableReference"]["projectId"],
-            "datasetId" => table["tableReference"]["datasetId"],
-            "tableId" => table["tableReference"]["tableId"]
-          }.delete_if { |_, v| v.nil? },
+          "sourceTable" => table,
           "destinationFormat" => dest_format
         }.delete_if { |_, v| v.nil? },
         "dryRun" => options[:dryrun]
@@ -523,14 +542,11 @@ module Gcloud
       "configuration" => {
         "load" => {
           "sourceUris" => Array(urls),
-          "destinationTable" => {
-            "projectId" => table["tableReference"]["projectId"],
-            "datasetId" => table["tableReference"]["datasetId"],
-            "tableId" => table["tableReference"]["tableId"]
-          }.delete_if { |_, v| v.nil? },
+          "destinationTable" => table,
           "createDisposition" => create_disposition(options[:create]),
           "writeDisposition" => write_disposition(options[:write]),
-          "sourceFormat" => source_format(path, options[:format])
+          "sourceFormat" => source_format(path, options[:format]),
+          "projectionFields" => projection_fields(options[:projection_fields])
         }.delete_if { |_, v| v.nil? },
         "dryRun" => options[:dryrun]
       }.delete_if { |_, v| v.nil? }
@@ -568,15 +584,22 @@ module Gcloud
       val = { "csv" => "CSV",
               "json" => "NEWLINE_DELIMITED_JSON",
               "newline_delimited_json" => "NEWLINE_DELIMITED_JSON",
-              "avro" => "AVRO" }[format.to_s.downcase]
+              "avro" => "AVRO",
+              "datastore" => "DATASTORE_BACKUP",
+              "datastore_backup" => "DATASTORE_BACKUP" }[format.to_s.downcase]
       return val unless val.nil?
       return nil if path.nil?
       return "CSV" if path.end_with? ".csv"
       return "NEWLINE_DELIMITED_JSON" if path.end_with? ".json"
       return "AVRO" if path.end_with? ".avro"
+      return "DATASTORE_BACKUP" if path.end_with? ".backup_info"
       nil
     end
 
+    def projection_fields array_or_str
+      Array(array_or_str) unless array_or_str.nil?
+    end
+
     # rubocop:enable all
 
     def load_media file, chunk_size = nil
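
Taken together, the load-job changes above enable loading from a Datastore backup. A sketch of the intended usage, assuming Table#load forwards the projection_fields option shown here (the bucket path and field names are hypothetical):

    require "gcloud"

    gcloud   = Gcloud.new
    bigquery = gcloud.bigquery
    dataset  = bigquery.dataset "my_dataset"
    table    = dataset.table "my_table"

    # The format is inferred from the .backup_info extension; it could also
    # be forced with format: "datastore_backup". projectionFields restricts
    # which entity properties are loaded.
    load_job = table.load "gs://my_bucket/20150908.backup_info",
                          projection_fields: ["title", "author"]
    load_job.wait_until_done!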
data/lib/gcloud/bigquery/dataset.rb CHANGED
@@ -16,7 +16,9 @@
 require "json"
 require "gcloud/bigquery/errors"
 require "gcloud/bigquery/table"
+require "gcloud/bigquery/table/schema"
 require "gcloud/bigquery/dataset/list"
+require "gcloud/bigquery/dataset/access"
 
 module Gcloud
   module Bigquery
@@ -34,7 +36,7 @@ module Gcloud
   #   bigquery = gcloud.bigquery
   #
   #   dataset = bigquery.create_dataset "my_dataset",
-  #                                     name: "My Dataset"
+  #                                     name: "My Dataset",
   #                                     description: "This is my Dataset"
   #
   class Dataset
@@ -73,6 +75,15 @@ module Gcloud
       @gapi["datasetReference"]["projectId"]
     end
 
+    ##
+    # The gapi fragment containing the Project ID and Dataset ID as a
+    # camel-cased hash.
+    def dataset_ref #:nodoc:
+      dataset_ref = @gapi["datasetReference"]
+      dataset_ref = dataset_ref.to_hash if dataset_ref.respond_to? :to_hash
+      dataset_ref
+    end
+
     ##
     # A descriptive name for the dataset.
     #
@@ -181,6 +192,89 @@ module Gcloud
       @gapi["location"]
     end
 
+    ##
+    # Retrieves the access rules for a Dataset using the Google Cloud
+    # Datastore API data structure of an array of hashes. The rules can be
+    # updated when passing a block, see Dataset::Access for all the methods
+    # available. See {BigQuery Access
+    # Control}[https://cloud.google.com/bigquery/access-control] for more
+    # information.
+    #
+    # === Examples
+    #
+    #   require "gcloud"
+    #
+    #   gcloud = Gcloud.new
+    #   bigquery = gcloud.bigquery
+    #   dataset = bigquery.dataset "my_dataset"
+    #
+    #   dataset.access #=> [{"role"=>"OWNER",
+    #                  #     "specialGroup"=>"projectOwners"},
+    #                  #    {"role"=>"WRITER",
+    #                  #     "specialGroup"=>"projectWriters"},
+    #                  #    {"role"=>"READER",
+    #                  #     "specialGroup"=>"projectReaders"},
+    #                  #    {"role"=>"OWNER",
+    #                  #     "userByEmail"=>"123456789-...com"}]
+    #
+    # Manage the access rules by passing a block.
+    #
+    #   require "gcloud"
+    #
+    #   gcloud = Gcloud.new
+    #   bigquery = gcloud.bigquery
+    #   dataset = bigquery.dataset "my_dataset"
+    #
+    #   dataset.access do |access|
+    #     access.add_owner_group "owners@example.com"
+    #     access.add_writer_user "writer@example.com"
+    #     access.remove_writer_user "readers@example.com"
+    #     access.add_reader_special :all
+    #     access.add_reader_view other_dataset_view_object
+    #   end
+    #
+    def access
+      ensure_full_data!
+      g = @gapi
+      g = g.to_hash if g.respond_to? :to_hash
+      a = g["access"] ||= []
+      return a unless block_given?
+      a2 = Access.new a, dataset_ref
+      yield a2
+      self.access = a2.access if a2.changed?
+    end
+
+    ##
+    # Sets the access rules for a Dataset using the Google Cloud Datastore API
+    # data structure of an array of hashes. See {BigQuery Access
+    # Control}[https://cloud.google.com/bigquery/access-control] for more
+    # information.
+    #
+    # This method is provided for advanced usage of managing the access rules.
+    # Calling #access with a block is the preferred way to manage access
+    # rules.
+    #
+    # === Example
+    #
+    #   require "gcloud"
+    #
+    #   gcloud = Gcloud.new
+    #   bigquery = gcloud.bigquery
+    #   dataset = bigquery.dataset "my_dataset"
+    #
+    #   dataset.access = [{"role"=>"OWNER",
+    #                      "specialGroup"=>"projectOwners"},
+    #                     {"role"=>"WRITER",
+    #                      "specialGroup"=>"projectWriters"},
+    #                     {"role"=>"READER",
+    #                      "specialGroup"=>"projectReaders"},
+    #                     {"role"=>"OWNER",
+    #                      "userByEmail"=>"123456789-...com"}]
+    #
+    def access= new_access
+      patch_gapi! access: new_access
+    end
+
     ##
     # Permanently deletes the dataset. The dataset must be empty before it can
     # be deleted unless the +force+ option is set to +true+.
@@ -236,10 +330,11 @@ module Gcloud
     # <code>options[:description]</code>::
     #   A user-friendly description of the table. (+String+)
     # <code>options[:schema]</code>::
-    #   A schema specifying fields and data types for the table. See the
+    #   A hash specifying fields and data types for the table. A block may be
+    #   passed instead (see examples.) For the format of this hash, see the
     #   {Tables resource
     #   }[https://cloud.google.com/bigquery/docs/reference/v2/tables#resource]
-    #   for more information. (+Hash+)
+    #   . (+Hash+)
     #
     # === Returns
     #
@@ -254,7 +349,35 @@ module Gcloud
     #   dataset = bigquery.dataset "my_dataset"
     #   table = dataset.create_table "my_table"
     #
-    # A name and description can be provided:
+    # You can also pass name and description options.
+    #
+    #   require "gcloud"
+    #
+    #   gcloud = Gcloud.new
+    #   bigquery = gcloud.bigquery
+    #   dataset = bigquery.dataset "my_dataset"
+    #   table = dataset.create_table "my_table"
+    #                                name: "My Table",
+    #                                description: "A description of my table."
+    #
+    # You can define the table's schema using a block.
+    #
+    #   require "gcloud"
+    #
+    #   gcloud = Gcloud.new
+    #   bigquery = gcloud.bigquery
+    #   dataset = bigquery.dataset "my_dataset"
+    #   table = dataset.create_table "my_table" do |schema|
+    #     schema.string "first_name", mode: :required
+    #     schema.record "cities_lived", mode: :repeated do |nested_schema|
+    #       nested_schema.string "place", mode: :required
+    #       nested_schema.integer "number_of_years", mode: :required
+    #     end
+    #   end
+    #
+    # Or, if you are adapting existing code that was written for the {Rest API
+    # }[https://cloud.google.com/bigquery/docs/reference/v2/tables#resource],
+    # you can pass the table's schema as a hash.
     #
     #   require "gcloud"
     #
@@ -288,20 +411,21 @@ module Gcloud
     #           }
     #         ]
     #       }
-    #   table = dataset.create_table "my_table",
-    #                                name: "My Table",
-    #                                schema: schema
+    #   table = dataset.create_table "my_table", schema: schema
     #
     # :category: Table
     #
     def create_table table_id, options = {}
       ensure_connection!
-      resp = connection.insert_table dataset_id, table_id, options
-      if resp.success?
-        Table.from_gapi resp.data, connection
-      else
-        fail ApiError.from_response(resp)
+      if block_given?
+        if options[:schema]
+          fail ArgumentError, "only schema block or schema option is allowed"
+        end
+        schema_builder = Table::Schema.new nil
+        yield schema_builder
+        options[:schema] = schema_builder.schema if schema_builder.changed?
       end
+      insert_table table_id, options
    end
 
     ##
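
Note the guard in create_table above: a schema block and the schema: option are mutually exclusive. A sketch of the failure mode:

    dataset.create_table "my_table", schema: schema do |s|
      s.string "first_name", mode: :required
    end
    #=> ArgumentError: only schema block or schema option is allowed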
@@ -447,7 +571,7 @@ module Gcloud
       ensure_connection!
       resp = connection.list_tables dataset_id, options
       if resp.success?
-        Table::List.from_resp resp, connection
+        Table::List.from_response resp, connection
       else
         fail ApiError.from_response(resp)
       end
@@ -516,11 +640,7 @@ module Gcloud
     #
     #   job = bigquery.query_job "SELECT name FROM my_table"
     #
-    #   loop do
-    #     break if job.done?
-    #     sleep 1
-    #     job.refresh!
-    #   end
+    #   job.wait_until_done!
     #   if !job.failed?
     #     job.query_results.each do |row|
     #       puts row["name"]
@@ -621,6 +741,15 @@ module Gcloud
 
     protected
 
+    def insert_table table_id, options
+      resp = connection.insert_table dataset_id, table_id, options
+      if resp.success?
+        Table.from_gapi resp.data, connection
+      else
+        fail ApiError.from_response(resp)
+      end
+    end
+
     ##
     # Raise an error unless an active connection is available.
     def ensure_connection!