google-cloud-bigquery 0.26.0 → 0.27.0

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 88d4c67489641c08fc5ba2e2dc4f2900c71719c4
- data.tar.gz: 37fdfb59139e9adb1d52cfc5aabbb503570d366a
+ metadata.gz: 1efd6a85955173b9194de996101ba92b393bebc2
+ data.tar.gz: 8c246d8d436bb637d1b848f860c0bcf674b1bb47
  SHA512:
- metadata.gz: 50bcceec330c54226ce05ae5e634702269bfe0c6c7e3f89b9c98194c97d3df4fbf8996133504519c3bf24ad41468877beb01956af500ec81324a0cda3663530b
- data.tar.gz: 8392c4e87188d742ba3dcfdd9ff7664223fa3a533382ea21920299edf0256be2462cc2772b011faafcaf49c3f9c6a5549db677f634a22872323ee0de67f158f7
+ metadata.gz: 39a385146545138ce4d0458b18a4a226a12090e38b1a61c0fd0fc8b942e0fb70a27258d96f54cc687cfe96c6dd429d0c8341a92bee55029b1747a94da3ea3094
+ data.tar.gz: 83facb29e66f5add2f152ae1d070b1be1899662756d5eedcb6c1529b2308fa09495687c1527582ccd94dd3dbe43cf93c9ca4651b1eae084f33bb992d9ba2d3ad
data/README.md CHANGED
@@ -42,7 +42,7 @@ load_job = table.load file
  count_sql = "SELECT owner, COUNT(*) AS complete_count FROM todos GROUP BY owner"
  data = bigquery.query count_sql
  data.each do |row|
- puts row["name"]
+ puts row[:name]
  end
  ```

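The API change running through this entire release is visible here: rows returned from queries and table data are now keyed by symbols rather than strings. A minimal sketch of the new access pattern (the dataset, table, and column names are placeholders):

```ruby
require "google/cloud/bigquery"

bigquery = Google::Cloud::Bigquery.new
data = bigquery.query "SELECT name FROM `my_dataset.my_table`"

data.each do |row|
  puts row[:name] # 0.27.0 uses symbol keys; row["name"] was the 0.26.0 style
end
```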
@@ -153,7 +153,7 @@ module Google
  # table = dataset.table "my_table"
  #
  # table.data.all do |row|
- # puts row["word"]
+ # puts row[:word]
  # end
  #
  # @example Using the enumerator by not passing a block:
@@ -164,7 +164,7 @@ module Google
  # table = dataset.table "my_table"
  #
  # words = table.data.all.map do |row|
- # row["word"]
+ # row[:word]
  # end
  #
  # @example Limit the number of API calls made:
@@ -175,7 +175,7 @@ module Google
  # table = dataset.table "my_table"
  #
  # table.data.all(request_limit: 10) do |row|
- # puts row["word"]
+ # puts row[:word]
  # end
  #
  def all request_limit: nil
@@ -596,6 +596,15 @@ module Google
  # Flattens all nested and repeated fields in the query results. The
  # default value is `true`. `large_results` parameter must be `true` if
  # this is set to `false`.
+ # @param [Integer] maximum_billing_tier Limits the billing tier for this
+ # job. Queries that have resource usage beyond this tier will fail
+ # (without incurring a charge). Optional. If unspecified, this will be
+ # set to your project default. For more information, see [High-Compute
+ # queries](https://cloud.google.com/bigquery/pricing#high-compute).
+ # @param [Integer] maximum_bytes_billed Limits the bytes billed for this
+ # job. Queries that will have bytes billed beyond this limit will fail
+ # (without incurring a charge). Optional. If unspecified, this will be
+ # set to your project default.
  #
  # @return [Google::Cloud::Bigquery::QueryJob]
  #
@@ -610,7 +619,7 @@ module Google
  # job.wait_until_done!
  # if !job.failed?
  # job.query_results.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  # end
  #
@@ -626,7 +635,7 @@ module Google
  # job.wait_until_done!
  # if !job.failed?
  # job.query_results.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  # end
  #
@@ -642,7 +651,7 @@ module Google
  # job.wait_until_done!
  # if !job.failed?
  # job.query_results.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  # end
  #
@@ -658,7 +667,7 @@ module Google
  # job.wait_until_done!
  # if !job.failed?
  # job.query_results.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  # end
  #
@@ -666,11 +675,14 @@ module Google
  #
  def query_job query, params: nil, priority: "INTERACTIVE", cache: true,
  table: nil, create: nil, write: nil, standard_sql: nil,
- legacy_sql: nil, large_results: nil, flatten: nil
+ legacy_sql: nil, large_results: nil, flatten: nil,
+ maximum_billing_tier: nil, maximum_bytes_billed: nil
  options = { priority: priority, cache: cache, table: table,
  create: create, write: write,
  large_results: large_results, flatten: flatten,
  legacy_sql: legacy_sql, standard_sql: standard_sql,
+ maximum_billing_tier: maximum_billing_tier,
+ maximum_bytes_billed: maximum_bytes_billed,
  params: params }
  options[:dataset] ||= self
  ensure_service!
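The two new options thread straight through to the job configuration. A hedged sketch of calling `Dataset#query_job` with the new cost controls (the dataset and table names are placeholders; as documented above, a query exceeding either limit fails without incurring a charge):

```ruby
require "google/cloud/bigquery"

bigquery = Google::Cloud::Bigquery.new
dataset = bigquery.dataset "my_dataset"

# Cap the query at billing tier 2 and roughly 1 GB billed.
job = dataset.query_job "SELECT name FROM my_table",
                        maximum_billing_tier: 2,
                        maximum_bytes_billed: 1_000_000_000

job.wait_until_done!
if !job.failed?
  job.query_results.each do |row|
    puts row[:name]
  end
end
```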
@@ -768,7 +780,7 @@ module Google
  # data = dataset.query "SELECT name FROM my_table"
  #
  # data.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  #
  # @example Query using legacy SQL:
@@ -781,7 +793,7 @@ module Google
  # legacy_sql: true
  #
  # data.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  #
  # @example Query using positional query parameters:
@@ -794,7 +806,7 @@ module Google
  # params: [1]
  #
  # data.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  #
  # @example Query using named query parameters:
@@ -807,7 +819,7 @@ module Google
  # params: { id: 1 }
  #
  # data.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  #
  # @!group Data
@@ -824,6 +836,199 @@ module Google
  QueryData.from_gapi gapi, service
  end

+ ##
+ # Loads data into the provided destination table. For the source of the
+ # data, you can pass a google-cloud storage file path or a
+ # google-cloud-storage `File` instance. Or, you can upload a file
+ # directly. See [Loading Data with a POST
+ # Request](https://cloud.google.com/bigquery/loading-data-post-request#multipart).
+ #
+ # @param [String] table_id The destination table to load the data into.
+ # @param [File, Google::Cloud::Storage::File, String] file A file or the
+ # URI of a Google Cloud Storage file containing data to load into the
+ # table.
+ # @param [String] format The source file format. The default value is
+ # `csv`.
+ #
+ # The following values are supported:
+ #
+ # * `csv` - CSV
+ # * `json` - [Newline-delimited JSON](http://jsonlines.org/)
+ # * `avro` - [Avro](http://avro.apache.org/)
+ # * `datastore_backup` - Cloud Datastore backup
+ # @param [String] create Specifies whether the job is allowed to create
+ # new tables. The default value is `needed`.
+ #
+ # The following values are supported:
+ #
+ # * `needed` - Create the table if it does not exist.
+ # * `never` - The table must already exist. A 'notFound' error is
+ # raised if the table does not exist.
+ # @param [String] write Specifies how to handle data already present in
+ # the table. The default value is `append`.
+ #
+ # The following values are supported:
+ #
+ # * `truncate` - BigQuery overwrites the table data.
+ # * `append` - BigQuery appends the data to the table.
+ # * `empty` - An error will be returned if the table already contains
+ # data.
+ # @param [Array<String>] projection_fields If the `format` option is set
+ # to `datastore_backup`, indicates which entity properties to load
+ # from a Cloud Datastore backup. Property names are case sensitive and
+ # must be top-level properties. If not set, BigQuery loads all
+ # properties. If any named property isn't found in the Cloud Datastore
+ # backup, an invalid error is returned.
+ # @param [Boolean] jagged_rows Accept rows that are missing trailing
+ # optional columns. The missing values are treated as nulls. If
+ # `false`, records with missing trailing columns are treated as bad
+ # records, and if there are too many bad records, an invalid error is
+ # returned in the job result. The default value is `false`. Only
+ # applicable to CSV, ignored for other formats.
+ # @param [Boolean] quoted_newlines Indicates if BigQuery should allow
+ # quoted data sections that contain newline characters in a CSV file.
+ # The default value is `false`.
+ # @param [String] encoding The character encoding of the data. The
+ # supported values are `UTF-8` or `ISO-8859-1`. The default value is
+ # `UTF-8`.
+ # @param [String] delimiter Specifies the separator for fields in a CSV
+ # file. BigQuery converts the string to `ISO-8859-1` encoding, and
+ # then uses the first byte of the encoded string to split the data in
+ # its raw, binary state. Default is <code>,</code>.
+ # @param [Boolean] ignore_unknown Indicates if BigQuery should allow
+ # extra values that are not represented in the table schema. If true,
+ # the extra values are ignored. If false, records with extra columns
+ # are treated as bad records, and if there are too many bad records,
+ # an invalid error is returned in the job result. The default value is
+ # `false`.
+ #
+ # The `format` property determines what BigQuery treats as an extra
+ # value:
+ #
+ # * `CSV`: Trailing columns
+ # * `JSON`: Named values that don't match any column names
+ # @param [Integer] max_bad_records The maximum number of bad records
+ # that BigQuery can ignore when running the job. If the number of bad
+ # records exceeds this value, an invalid error is returned in the job
+ # result. The default value is `0`, which requires that all records
+ # are valid.
+ # @param [String] quote The value that is used to quote data sections in
+ # a CSV file. BigQuery converts the string to ISO-8859-1 encoding, and
+ # then uses the first byte of the encoded string to split the data in
+ # its raw, binary state. The default value is a double-quote
+ # <code>"</code>. If your data does not contain quoted sections, set
+ # the property value to an empty string. If your data contains quoted
+ # newline characters, you must also set the allowQuotedNewlines
+ # property to true.
+ # @param [Integer] skip_leading The number of rows at the top of a CSV
+ # file that BigQuery will skip when loading the data. The default
+ # value is `0`. This property is useful if you have header rows in the
+ # file that should be skipped.
+ # @param [Google::Cloud::Bigquery::Schema] schema The schema for the
+ # destination table. Optional. The schema can be omitted if the
+ # destination table already exists, or if you're loading data from a
+ # Google Cloud Datastore backup.
+ #
+ # See {Project#schema} for the creation of the schema for use with
+ # this option. Also note that for most use cases, the block yielded by
+ # this method is a more convenient way to configure the schema.
+ #
+ # @yield [schema] A block for setting the schema for the destination
+ # table. The schema can be omitted if the destination table already
+ # exists, or if you're loading data from a Google Cloud Datastore
+ # backup.
+ # @yieldparam [Google::Cloud::Bigquery::Schema] schema The schema
+ # instance provided using the `schema` option, or a new, empty schema
+ # instance
+ #
+ # @return [Google::Cloud::Bigquery::LoadJob]
+ #
+ # @example
+ # require "google/cloud/bigquery"
+ #
+ # bigquery = Google::Cloud::Bigquery.new
+ # dataset = bigquery.dataset "my_dataset"
+ #
+ # gs_url = "gs://my-bucket/file-name.csv"
+ # load_job = dataset.load "my_new_table", gs_url do |schema|
+ # schema.string "first_name", mode: :required
+ # schema.record "cities_lived", mode: :repeated do |nested_schema|
+ # nested_schema.string "place", mode: :required
+ # nested_schema.integer "number_of_years", mode: :required
+ # end
+ # end
+ #
+ # @example Pass a google-cloud-storage `File` instance:
+ # require "google/cloud/bigquery"
+ # require "google/cloud/storage"
+ #
+ # bigquery = Google::Cloud::Bigquery.new
+ # dataset = bigquery.dataset "my_dataset"
+ #
+ # storage = Google::Cloud::Storage.new
+ # bucket = storage.bucket "my-bucket"
+ # file = bucket.file "file-name.csv"
+ # load_job = dataset.load "my_new_table", file do |schema|
+ # schema.string "first_name", mode: :required
+ # schema.record "cities_lived", mode: :repeated do |nested_schema|
+ # nested_schema.string "place", mode: :required
+ # nested_schema.integer "number_of_years", mode: :required
+ # end
+ # end
+ #
+ # @example Upload a file directly:
+ # require "google/cloud/bigquery"
+ #
+ # bigquery = Google::Cloud::Bigquery.new
+ # dataset = bigquery.dataset "my_dataset"
+ #
+ # file = File.open "my_data.csv"
+ # load_job = dataset.load "my_new_table", file do |schema|
+ # schema.string "first_name", mode: :required
+ # schema.record "cities_lived", mode: :repeated do |nested_schema|
+ # nested_schema.string "place", mode: :required
+ # nested_schema.integer "number_of_years", mode: :required
+ # end
+ # end
+ #
+ # @example Schema is not required with a Cloud Datastore backup:
+ # require "google/cloud/bigquery"
+ #
+ # bigquery = Google::Cloud::Bigquery.new
+ # dataset = bigquery.dataset "my_dataset"
+ #
+ # load_job = dataset.load "my_new_table",
+ # "gs://my-bucket/xxxx.kind_name.backup_info",
+ # format: "datastore_backup"
+ #
+ # @!group Data
+ #
+ def load table_id, file, format: nil, create: nil, write: nil,
+ projection_fields: nil, jagged_rows: nil, quoted_newlines: nil,
+ encoding: nil, delimiter: nil, ignore_unknown: nil,
+ max_bad_records: nil, quote: nil, skip_leading: nil,
+ dryrun: nil, schema: nil
+ ensure_service!
+
+ if block_given?
+ schema ||= Schema.from_gapi
+ yield schema
+ end
+ schema_gapi = schema.to_gapi if schema
+
+ options = { format: format, create: create, write: write,
+ projection_fields: projection_fields,
+ jagged_rows: jagged_rows,
+ quoted_newlines: quoted_newlines, encoding: encoding,
+ delimiter: delimiter, ignore_unknown: ignore_unknown,
+ max_bad_records: max_bad_records, quote: quote,
+ skip_leading: skip_leading, dryrun: dryrun,
+ schema: schema_gapi }
+ return load_storage(table_id, file, options) if storage_url? file
+ return load_local(table_id, file, options) if local_file? file
+ fail Google::Cloud::Error, "Don't know how to load #{file}"
+ end
+
  ##
  # @private New Dataset from a Google API Client object.
  def self.from_gapi gapi, conn
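Putting the new `Dataset#load` together: it accepts a `gs://` URL, a `Google::Cloud::Storage::File`, or a local `File`, and yields a schema builder when a block is given. A hedged sketch combining several of the CSV options documented above (the bucket, table, and column names are placeholders):

```ruby
require "google/cloud/bigquery"

bigquery = Google::Cloud::Bigquery.new
dataset = bigquery.dataset "my_dataset"

# Load a CSV that has one header row and uses a pipe delimiter,
# replacing any existing table data.
load_job = dataset.load "my_new_table", "gs://my-bucket/data.csv",
                        format: "csv",
                        delimiter: "|",
                        skip_leading: 1,
                        write: "truncate" do |schema|
  schema.string "first_name", mode: :required
  schema.integer "age"
end
```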
@@ -868,6 +1073,34 @@ module Google
  @gapi.is_a? Google::Apis::BigqueryV2::Dataset
  end

+ def load_storage table_id, url, options = {}
+ # Convert to storage URL
+ url = url.to_gs_url if url.respond_to? :to_gs_url
+
+ gapi = service.load_table_gs_url dataset_id, table_id, url, options
+ Job.from_gapi gapi, service
+ end
+
+ def load_local table_id, file, options = {}
+ # Convert to storage URL
+ file = file.to_gs_url if file.respond_to? :to_gs_url
+
+ gapi = service.load_table_file dataset_id, table_id, file, options
+ Job.from_gapi gapi, service
+ end
+
+ def storage_url? file
+ file.respond_to?(:to_gs_url) ||
+ (file.respond_to?(:to_str) &&
+ file.to_str.downcase.start_with?("gs://"))
+ end
+
+ def local_file? file
+ ::File.file? file
+ rescue
+ false
+ end
+
  ##
  # Yielded to a block to accumulate changes for a patch request.
  class Updater < Dataset
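These private helpers make `load` dispatch on the argument's type: anything that responds to `to_gs_url`, or a string beginning with `gs://`, becomes a storage-based load job; a readable local file is uploaded directly; and anything else raises. A sketch of the resulting behavior (the table and bucket names are placeholders):

```ruby
dataset.load "my_table", "gs://my-bucket/data.csv" # routed to load_storage
dataset.load "my_table", File.open("data.csv")     # routed to load_local
dataset.load "my_table", 42                        # raises Google::Cloud::Error
```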
@@ -22,6 +22,7 @@ require "google/cloud/bigquery/job"
  require "google/cloud/bigquery/query_data"
  require "google/cloud/bigquery/project/list"
  require "google/cloud/bigquery/time"
+ require "google/cloud/bigquery/schema"

  module Google
  module Cloud
@@ -155,6 +156,8 @@ module Google
  # * `append` - BigQuery appends the data to the table.
  # * `empty` - A 'duplicate' error is returned in the job result if the
  # table exists and contains data.
+ # @param [Dataset, String] dataset The default dataset to use for
+ # unqualified table names in the query. Optional.
  # @param [Boolean] large_results If `true`, allows the query to produce
  # arbitrarily large result tables at a slight cost in performance.
  # Requires `table` parameter to be set.
@@ -180,6 +183,15 @@ module Google
  # Flattens all nested and repeated fields in the query results. The
  # default value is `true`. `large_results` parameter must be `true` if
  # this is set to `false`.
+ # @param [Integer] maximum_billing_tier Limits the billing tier for this
+ # job. Queries that have resource usage beyond this tier will fail
+ # (without incurring a charge). Optional. If unspecified, this will be
+ # set to your project default. For more information, see [High-Compute
+ # queries](https://cloud.google.com/bigquery/pricing#high-compute).
+ # @param [Integer] maximum_bytes_billed Limits the bytes billed for this
+ # job. Queries that will have bytes billed beyond this limit will fail
+ # (without incurring a charge). Optional. If unspecified, this will be
+ # set to your project default.
  #
  # @return [Google::Cloud::Bigquery::QueryJob]
  #
@@ -194,7 +206,7 @@ module Google
  # job.wait_until_done!
  # if !job.failed?
  # job.query_results.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  # end
  #
@@ -210,7 +222,7 @@ module Google
  # job.wait_until_done!
  # if !job.failed?
  # job.query_results.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  # end
  #
@@ -227,7 +239,7 @@ module Google
  # job.wait_until_done!
  # if !job.failed?
  # job.query_results.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  # end
  #
@@ -244,20 +256,24 @@ module Google
  # job.wait_until_done!
  # if !job.failed?
  # job.query_results.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  # end
  #
  def query_job query, params: nil, priority: "INTERACTIVE", cache: true,
  table: nil, create: nil, write: nil, dataset: nil,
  standard_sql: nil, legacy_sql: nil, large_results: nil,
- flatten: nil
+ flatten: nil, maximum_billing_tier: nil,
+ maximum_bytes_billed: nil
  ensure_service!
  options = { priority: priority, cache: cache, table: table,
  create: create, write: write,
  large_results: large_results, flatten: flatten,
  dataset: dataset, legacy_sql: legacy_sql,
- standard_sql: standard_sql, params: params }
+ standard_sql: standard_sql,
+ maximum_billing_tier: maximum_billing_tier,
+ maximum_bytes_billed: maximum_bytes_billed,
+ params: params }
  gapi = service.query_job query, options
  Job.from_gapi gapi, service
  end
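At the project level the same cost controls compose with the `dataset` parameter documented above. A hedged sketch (default credentials and placeholder names assumed):

```ruby
require "google/cloud/bigquery"

bigquery = Google::Cloud::Bigquery.new

job = bigquery.query_job "SELECT name FROM my_table",
                         dataset: "my_dataset",
                         maximum_bytes_billed: 1_000_000_000

job.wait_until_done!
if !job.failed?
  job.query_results.each do |row|
    puts row[:name]
  end
end
```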
@@ -356,7 +372,7 @@ module Google
  # data = bigquery.query sql
  #
  # data.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  #
  # @example Query using legacy SQL:
@@ -368,7 +384,7 @@ module Google
  # data = bigquery.query sql, legacy_sql: true
  #
  # data.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  #
  # @example Retrieve all rows: (See {QueryData#all})
@@ -379,7 +395,7 @@ module Google
  # data = bigquery.query "SELECT name FROM `my_dataset.my_table`"
  #
  # data.all do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  #
  # @example Query using positional query parameters:
@@ -393,7 +409,7 @@ module Google
  # params: [1]
  #
  # data.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  #
  # @example Query using named query parameters:
@@ -407,7 +423,7 @@ module Google
  # params: { id: 1 }
  #
  # data.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  #
  def query query, params: nil, max: nil, timeout: 10000, dryrun: nil,
@@ -714,7 +730,7 @@ module Google
  # params: { time: fourpm }
  #
  # data.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  #
  # @example Create Time with fractional seconds:
@@ -729,13 +745,52 @@ module Google
  # params: { time: precise_time }
  #
  # data.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  #
  def time hour, minute, second
  Bigquery::Time.new "#{hour}:#{minute}:#{second}"
  end

+ ##
+ # Creates a new schema instance. An optional block may be given to
+ # configure the schema, otherwise the schema is returned empty and may
+ # be configured directly.
+ #
+ # The returned schema can be passed to {Dataset#load} using the `schema`
+ # option. However, for most use cases, the block yielded by
+ # {Dataset#load} is a more convenient way to configure the schema for
+ # the destination table.
+ #
+ # @yield [schema] a block for setting the schema
+ # @yieldparam [Schema] schema the object accepting the schema
+ #
+ # @return [Google::Cloud::Bigquery::Schema]
+ #
+ # @example
+ # require "google/cloud/bigquery"
+ #
+ # bigquery = Google::Cloud::Bigquery.new
+ #
+ # schema = bigquery.schema do |s|
+ # s.string "first_name", mode: :required
+ # s.record "cities_lived", mode: :repeated do |nested_schema|
+ # nested_schema.string "place", mode: :required
+ # nested_schema.integer "number_of_years", mode: :required
+ # end
+ # end
+ #
+ # dataset = bigquery.dataset "my_dataset"
+ #
+ # gs_url = "gs://my-bucket/file-name.csv"
+ # load_job = dataset.load "my_new_table", gs_url, schema: schema
+ #
+ def schema
+ s = Schema.from_gapi
+ yield s if block_given?
+ s
+ end
+
  ##
  # @private New Project from a Google API Client object, using the
  # same Credentials as this project.
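Because `Project#schema` returns a standalone `Schema` instance, one configured schema can back several load jobs, which is the case the `schema:` option serves. A sketch of that reuse (names are placeholders):

```ruby
schema = bigquery.schema do |s|
  s.string "first_name", mode: :required
end

dataset = bigquery.dataset "my_dataset"

# The same schema drives two destination tables.
dataset.load "table_a", "gs://my-bucket/a.csv", schema: schema
dataset.load "table_b", "gs://my-bucket/b.csv", schema: schema
```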
@@ -144,7 +144,7 @@ module Google
  #
  # data = job.query_results
  # data.all do |row|
- # puts row["word"]
+ # puts row[:word]
  # end
  #
  # @example Using the enumerator by not passing a block:
@@ -155,7 +155,7 @@ module Google
  #
  # data = job.query_results
  # words = data.all.map do |row|
- # row["word"]
+ # row[:word]
  # end
  #
  # @example Limit the number of API calls made:
@@ -166,7 +166,7 @@ module Google
  #
  # data = job.query_results
  # data.all(request_limit: 10) do |row|
- # puts row["word"]
+ # puts row[:word]
  # end
  #
  def all request_limit: nil
@@ -74,6 +74,22 @@ module Google
  val
  end

+ ##
+ # Limits the billing tier for this job.
+ # For more information, see [High-Compute
+ # queries](https://cloud.google.com/bigquery/pricing#high-compute).
+ def maximum_billing_tier
+ @gapi.configuration.query.maximum_billing_tier
+ end
+
+ ##
+ # Limits the bytes billed for this job.
+ def maximum_bytes_billed
+ Integer @gapi.configuration.query.maximum_bytes_billed
+ rescue
+ nil
+ end
+
  ##
  # Checks if the query results are from the query cache.
  def cache_hit?
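The new readers surface the limits back off a created job; note that `maximum_bytes_billed` coerces the API's value with `Integer` and falls back to `nil`. A hedged sketch (assuming the job was created with the options shown earlier):

```ruby
job = dataset.query_job "SELECT name FROM my_table",
                        maximum_billing_tier: 2,
                        maximum_bytes_billed: 1_000_000_000

puts job.maximum_billing_tier # => 2
puts job.maximum_bytes_billed # => 1000000000 (Integer, or nil if unset)
```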
@@ -136,7 +152,7 @@ module Google
  # job.wait_until_done!
  # data = job.query_results
  # data.each do |row|
- # puts row["word"]
+ # puts row[:word]
  # end
  # data = data.next if data.next?
  #
@@ -260,7 +260,10 @@ module Google
  end

  # @private
- def self.from_gapi gapi
+ # @param [Google::Apis::BigqueryV2::TableSchema, nil] gapi Returns an
+ # empty schema if nil or no arg is provided. The default is nil.
+ #
+ def self.from_gapi gapi = nil
  gapi ||= Google::Apis::BigqueryV2::TableSchema.new fields: []
  gapi.fields ||= []
  new.tap do |s|
@@ -412,8 +412,9 @@ module Google
  flatten_results: options[:flatten],
  default_dataset: default_dataset,
  use_legacy_sql: Convert.resolve_legacy_sql(
- options[:standard_sql], options[:legacy_sql])
-
+ options[:standard_sql], options[:legacy_sql]),
+ maximum_billing_tier: options[:maximum_billing_tier],
+ maximum_bytes_billed: options[:maximum_bytes_billed]
  )
  )
  )
@@ -361,11 +361,7 @@ module Google
  ensure_full_data!
  schema_builder = Schema.from_gapi @gapi.schema
  if block_given?
- if replace
- empty_schema = Google::Apis::BigqueryV2::TableSchema.new(
- fields: [])
- schema_builder = Schema.from_gapi empty_schema
- end
+ schema_builder = Schema.from_gapi if replace
  yield schema_builder
  if schema_builder.changed?
  @gapi.schema = schema_builder.to_gapi
@@ -413,7 +409,7 @@ module Google
  #
  # data = table.data
  # data.each do |row|
- # puts row["first_name"]
+ # puts row[:first_name]
  # end
  # if data.next?
  # more_data = data.next if data.next?
@@ -428,7 +424,7 @@ module Google
  #
  # data = table.data
  # data.all do |row|
- # puts row["first_name"]
+ # puts row[:first_name]
  # end
  #
  # @!group Data
@@ -646,7 +642,7 @@ module Google
  #
  # load_job = table.load "gs://my-bucket/file-name.csv"
  #
- # @example Pass a google-cloud storage file instance:
+ # @example Pass a google-cloud-storage `File` instance:
  # require "google/cloud/bigquery"
  # require "google/cloud/storage"
  #
@@ -909,10 +905,7 @@ module Google
  # TODO: make sure to call ensure_full_data! on Dataset#update
  @schema ||= Schema.from_gapi @gapi.schema
  if block_given?
- if replace
- @schema = Schema.from_gapi \
- Google::Apis::BigqueryV2::TableSchema.new(fields: [])
- end
+ @schema = Schema.from_gapi if replace
  yield @schema
  check_for_mutated_schema!
  end
@@ -35,7 +35,7 @@ module Google
  # params: { time: fourpm }
  #
  # data.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  #
  # @example Create Time with fractional seconds:
@@ -50,7 +50,7 @@ module Google
  # params: { time: precise_time }
  #
  # data.each do |row|
- # puts row["name"]
+ # puts row[:name]
  # end
  #
  Time = Struct.new :value
@@ -16,7 +16,7 @@
  module Google
  module Cloud
  module Bigquery
- VERSION = "0.26.0"
+ VERSION = "0.27.0"
  end
  end
  end
@@ -384,7 +384,7 @@ module Google
  #
  # data = view.data
  # data.each do |row|
- # puts row["first_name"]
+ # puts row[:first_name]
  # end
  # more_data = data.next if data.next?
  #
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: google-cloud-bigquery
  version: !ruby/object:Gem::Version
- version: 0.26.0
+ version: 0.27.0
  platform: ruby
  authors:
  - Mike Moore
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2017-04-05 00:00:00.000000000 Z
+ date: 2017-06-28 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: google-cloud-core
@@ -31,14 +31,14 @@ dependencies:
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: 0.11.0
+ version: 0.13.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: 0.11.0
+ version: 0.13.0
  - !ruby/object:Gem::Dependency
  name: minitest
  requirement: !ruby/object:Gem::Requirement
@@ -202,7 +202,7 @@ files:
  - lib/google/cloud/bigquery/time.rb
  - lib/google/cloud/bigquery/version.rb
  - lib/google/cloud/bigquery/view.rb
- homepage: http://googlecloudplatform.github.io/google-cloud-ruby/
+ homepage: https://github.com/GoogleCloudPlatform/google-cloud-ruby/tree/master/google-cloud-bigquery
  licenses:
  - Apache-2.0
  metadata: {}
@@ -222,7 +222,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.6.11
+ rubygems_version: 2.6.12
  signing_key:
  specification_version: 4
  summary: API Client library for Google BigQuery