google-cloud-bigquery 1.17.0 → 1.18.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -35,7 +35,7 @@ module Google
  # access.add_owner_group "owners@example.com"
  # access.add_writer_user "writer@example.com"
  # access.remove_writer_user "readers@example.com"
- # access.add_reader_special :all
+ # access.add_reader_special :all_users
  # end
  #
  class Access
@@ -74,7 +74,9 @@ module Google
  "projectWriters" => "projectWriters",
  "all" => "allAuthenticatedUsers",
  "all_authenticated_users" => "allAuthenticatedUsers",
- "allAuthenticatedUsers" => "allAuthenticatedUsers"
+ "allAuthenticatedUsers" => "allAuthenticatedUsers",
+ "all_users" => "allUsers",
+ "allUsers" => "allUsers"
  }.freeze

  ##
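With the two new `"all_users"`/`"allUsers"` entries in this mapping, every `*_special` helper now accepts `:all_users` (the public `allUsers` member) in addition to `:all`/`:all_authenticated_users`. A minimal sketch of the updated API, assuming a dataset named `my_dataset` already exists:

```ruby
require "google/cloud/bigquery"

bigquery = Google::Cloud::Bigquery.new
dataset  = bigquery.dataset "my_dataset"

# Grant read access to everyone on the internet; :all_users resolves to the
# "allUsers" entry in the mapping above.
dataset.access do |access|
  access.add_reader_special :all_users
end

# The check helpers accept the same value.
dataset.access.reader_special? :all_users #=> true
```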
@@ -172,7 +174,7 @@ module Google
  # Add reader access to a special group.
  #
  # @param [String] group Accepted values are `owners`, `writers`,
- # `readers`, and `all`.
+ # `readers`, `all_authenticated_users`, and `all_users`.
  #
  # @example
  # require "google/cloud/bigquery"
@@ -181,7 +183,7 @@ module Google
  # dataset = bigquery.dataset "my_dataset"
  #
  # dataset.access do |access|
- # access.add_reader_special :all
+ # access.add_reader_special :all_users
  # end
  #
  def add_reader_special group
@@ -277,7 +279,7 @@ module Google
  # Add writer access to a special group.
  #
  # @param [String] group Accepted values are `owners`, `writers`,
- # `readers`, and `all`.
+ # `readers`, `all_authenticated_users`, and `all_users`.
  #
  # @example
  # require "google/cloud/bigquery"
@@ -286,7 +288,7 @@ module Google
  # dataset = bigquery.dataset "my_dataset"
  #
  # dataset.access do |access|
- # access.add_writer_special :all
+ # access.add_writer_special :all_users
  # end
  #
  def add_writer_special group
@@ -355,7 +357,7 @@ module Google
  # Add owner access to a special group.
  #
  # @param [String] group Accepted values are `owners`, `writers`,
- # `readers`, and `all`.
+ # `readers`, `all_authenticated_users`, and `all_users`.
  #
  # @example
  # require "google/cloud/bigquery"
@@ -364,7 +366,7 @@ module Google
  # dataset = bigquery.dataset "my_dataset"
  #
  # dataset.access do |access|
- # access.add_owner_special :all
+ # access.add_owner_special :all_users
  # end
  #
  def add_owner_special group
@@ -433,7 +435,7 @@ module Google
  # Remove reader access from a special group.
  #
  # @param [String] group Accepted values are `owners`, `writers`,
- # `readers`, and `all`.
+ # `readers`, `all_authenticated_users`, and `all_users`.
  #
  # @example
  # require "google/cloud/bigquery"
@@ -442,7 +444,7 @@ module Google
  # dataset = bigquery.dataset "my_dataset"
  #
  # dataset.access do |access|
- # access.remove_reader_special :all
+ # access.remove_reader_special :all_users
  # end
  #
  def remove_reader_special group
@@ -538,7 +540,7 @@ module Google
  # Remove writer access from a special group.
  #
  # @param [String] group Accepted values are `owners`, `writers`,
- # `readers`, and `all`.
+ # `readers`, `all_authenticated_users`, and `all_users`.
  #
  # @example
  # require "google/cloud/bigquery"
@@ -547,7 +549,7 @@ module Google
  # dataset = bigquery.dataset "my_dataset"
  #
  # dataset.access do |access|
- # access.remove_writer_special :all
+ # access.remove_writer_special :all_users
  # end
  #
  def remove_writer_special group
@@ -616,7 +618,7 @@ module Google
  # Remove owner access from a special group.
  #
  # @param [String] group Accepted values are `owners`, `writers`,
- # `readers`, and `all`.
+ # `readers`, `all_authenticated_users`, and `all_users`.
  #
  # @example
  # require "google/cloud/bigquery"
@@ -625,7 +627,7 @@ module Google
  # dataset = bigquery.dataset "my_dataset"
  #
  # dataset.access do |access|
- # access.remove_owner_special :all
+ # access.remove_owner_special :all_users
  # end
  #
  def remove_owner_special group
@@ -691,7 +693,7 @@ module Google
  # Checks reader access for a special group.
  #
  # @param [String] group Accepted values are `owners`, `writers`,
- # `readers`, and `all`.
+ # `readers`, `all_authenticated_users`, and `all_users`.
  #
  # @example
  # require "google/cloud/bigquery"
@@ -700,7 +702,7 @@ module Google
  # dataset = bigquery.dataset "my_dataset"
  #
  # access = dataset.access
- # access.reader_special? :all #=> false
+ # access.reader_special? :all_users #=> false
  #
  def reader_special? group
  lookup_access_role_scope_value :reader, :special, group
@@ -791,7 +793,7 @@ module Google
  # Checks writer access for a special group.
  #
  # @param [String] group Accepted values are `owners`, `writers`,
- # `readers`, and `all`.
+ # `readers`, `all_authenticated_users`, and `all_users`.
  #
  # @example
  # require "google/cloud/bigquery"
@@ -800,7 +802,7 @@ module Google
  # dataset = bigquery.dataset "my_dataset"
  #
  # access = dataset.access
- # access.writer_special? :all #=> false
+ # access.writer_special? :all_users #=> false
  #
  def writer_special? group
  lookup_access_role_scope_value :writer, :special, group
@@ -865,7 +867,7 @@ module Google
  # Checks owner access for a special group.
  #
  # @param [String] group Accepted values are `owners`, `writers`,
- # `readers`, and `all`.
+ # `readers`, `all_authenticated_users`, and `all_users`.
  #
  # @example
  # require "google/cloud/bigquery"
@@ -874,7 +876,7 @@ module Google
  # dataset = bigquery.dataset "my_dataset"
  #
  # access = dataset.access
- # access.owner_special? :all #=> false
+ # access.owner_special? :all_users #=> false
  #
  def owner_special? group
  lookup_access_role_scope_value :owner, :special, group
@@ -964,6 +964,8 @@ module Google
  ##
  # The fields of the schema.
  #
+ # @return [Array<Schema::Field>] An array of field objects.
+ #
  def fields
  schema.fields
  end
@@ -971,10 +973,22 @@ module Google
  ##
  # The names of the columns in the schema.
  #
+ # @return [Array<Symbol>] An array of column names.
+ #
  def headers
  schema.headers
  end

+ ##
+ # The types of the fields in the data in the schema, using the same
+ # format as the optional query parameter types.
+ #
+ # @return [Hash] A hash with field names as keys, and types as values.
+ #
+ def param_types
+ schema.param_types
+ end
+
  ##
  # @private Google API Client object.
  def to_gapi
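For reference, the new `#param_types` reader simply delegates to `Schema#param_types` and reports each field as a query-parameter type code. A hedged sketch of the expected shape (the receiving object is assumed here to be a table, and the schema columns are made up for illustration):

```ruby
require "google/cloud/bigquery"

bigquery = Google::Cloud::Bigquery.new
table    = bigquery.dataset("my_dataset").table("my_table")

# Given a schema like (id INT64, name STRING, scores ARRAY<FLOAT64>),
# the result is roughly a hash of field names to type codes:
table.param_types #=> { id: :INT64, name: :STRING, scores: [:FLOAT64] }
```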
@@ -1095,6 +1109,8 @@ module Google
  ##
  # The fields of the schema.
  #
+ # @return [Array<Schema::Field>] An array of field objects.
+ #
  def fields
  schema.fields
  end
@@ -1102,10 +1118,22 @@ module Google
  ##
  # The names of the columns in the schema.
  #
+ # @return [Array<Symbol>] An array of column names.
+ #
  def headers
  schema.headers
  end

+ ##
+ # The types of the fields in the data in the schema, using the same
+ # format as the optional query parameter types.
+ #
+ # @return [Hash] A hash with field names as keys, and types as values.
+ #
+ def param_types
+ schema.param_types
+ end
+
  ##
  # @private Google API Client object.
  def to_gapi
@@ -269,27 +269,6 @@ module Google
  # Queries data by creating a [query
  # job](https://cloud.google.com/bigquery/docs/query-overview#query_jobs).
  #
- # When using standard SQL and passing arguments using `params`, Ruby
- # types are mapped to BigQuery types as follows:
- #
- # | BigQuery | Ruby | Notes |
- # |-------------|----------------|---|
- # | `BOOL` | `true`/`false` | |
- # | `INT64` | `Integer` | |
- # | `FLOAT64` | `Float` | |
- # | `NUMERIC` | `BigDecimal` | Will be rounded to 9 decimal places |
- # | `STRING` | `String` | |
- # | `DATETIME` | `DateTime` | `DATETIME` does not support time zone. |
- # | `DATE` | `Date` | |
- # | `TIMESTAMP` | `Time` | |
- # | `TIME` | `Google::Cloud::BigQuery::Time` | |
- # | `BYTES` | `File`, `IO`, `StringIO`, or similar | |
- # | `ARRAY` | `Array` | Nested arrays, `nil` values are not supported. |
- # | `STRUCT` | `Hash` | Hash keys may be strings or symbols. |
- #
- # See [Data Types](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types)
- # for an overview of each BigQuery data type, including allowed values.
- #
  # The geographic location for the job ("US", "EU", etc.) can be set via
  # {QueryJob::Updater#location=} in a block passed to this method.
  #
@@ -297,13 +276,55 @@ module Google
  # syntax](https://cloud.google.com/bigquery/query-reference), of the
  # query to execute. Example: "SELECT count(f1) FROM
  # [myProjectId:myDatasetId.myTableId]".
- # @param [Array, Hash] params Standard SQL only. Used to pass query
- # arguments when the `query` string contains either positional (`?`)
- # or named (`@myparam`) query parameters. If value passed is an array
- # `["foo"]`, the query must use positional query parameters. If value
- # passed is a hash `{ myparam: "foo" }`, the query must use named
- # query parameters. When set, `legacy_sql` will automatically be set
- # to false and `standard_sql` to true.
+ # @param [Array, Hash] params Standard SQL only. Used to pass query arguments when the `query` string contains
+ # either positional (`?`) or named (`@myparam`) query parameters. If value passed is an array `["foo"]`, the
+ # query must use positional query parameters. If value passed is a hash `{ myparam: "foo" }`, the query must
+ # use named query parameters. When set, `legacy_sql` will automatically be set to false and `standard_sql` to
+ # true.
+ #
+ # Ruby types are mapped to BigQuery types as follows:
+ #
+ # | BigQuery | Ruby | Notes |
+ # |-------------|--------------------------------------|------------------------------------------------|
+ # | `BOOL` | `true`/`false` | |
+ # | `INT64` | `Integer` | |
+ # | `FLOAT64` | `Float` | |
+ # | `NUMERIC` | `BigDecimal` | Will be rounded to 9 decimal places |
+ # | `STRING` | `String` | |
+ # | `DATETIME` | `DateTime` | `DATETIME` does not support time zone. |
+ # | `DATE` | `Date` | |
+ # | `TIMESTAMP` | `Time` | |
+ # | `TIME` | `Google::Cloud::BigQuery::Time` | |
+ # | `BYTES` | `File`, `IO`, `StringIO`, or similar | |
+ # | `ARRAY` | `Array` | Nested arrays, `nil` values are not supported. |
+ # | `STRUCT` | `Hash` | Hash keys may be strings or symbols. |
+ #
+ # See [Data Types](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types) for an overview
+ # of each BigQuery data type, including allowed values.
+ # @param [Array, Hash] types Standard SQL only. Types of the SQL parameters in `params`. It is not always
+ # possible to infer the right SQL type from a value in `params`. In these cases, `types` must be used to
+ # specify the SQL type for these values.
+ #
+ # Must match the value type passed to `params`. This must be an `Array` when the query uses positional query
+ # parameters. This must be a `Hash` when the query uses named query parameters. The values should be BigQuery
+ # type codes from the following list:
+ #
+ # * `:BOOL`
+ # * `:INT64`
+ # * `:FLOAT64`
+ # * `:NUMERIC`
+ # * `:STRING`
+ # * `:DATETIME`
+ # * `:DATE`
+ # * `:TIMESTAMP`
+ # * `:TIME`
+ # * `:BYTES`
+ # * `Array` - Lists are specified by providing the type code in an array. For example, an array of integers
+ # is specified as `[:INT64]`.
+ # * `Hash` - Types for STRUCT values (`Hash` objects) are specified using a `Hash` object, where the keys
+ # match the `params` hash, and the values are the type codes that match the data.
+ #
+ # Types are optional.
  # @param [Hash<String|Symbol, External::DataSource>] external A Hash
  # that represents the mapping of the external tables to the table
  # names used in the SQL query. The hash keys are the table names, and
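Positional parameters follow the same pattern: `types` is then an `Array` that parallels `params`. A minimal sketch, assuming a `my_dataset.my_table` table with an `INT64` `id` column (an empty array gives the client nothing to infer an element type from, which is exactly when `types` is needed):

```ruby
require "google/cloud/bigquery"

bigquery = Google::Cloud::Bigquery.new

job = bigquery.query_job "SELECT name FROM `my_dataset.my_table` " \
                         "WHERE id IN UNNEST(?)",
                         params: [[]],       # empty ARRAY value, element type unknown
                         types:  [[:INT64]]  # ARRAY<INT64>, matched by position

job.wait_until_done!
job.data.each { |row| puts row[:name] } unless job.failed?
```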
@@ -449,8 +470,8 @@ module Google
  # bigquery = Google::Cloud::Bigquery.new
  #
  # job = bigquery.query_job "SELECT name FROM " \
- # "`my_dataset.my_table`" \
- # " WHERE id = ?",
+ # "`my_dataset.my_table` " \
+ # "WHERE id = ?",
  # params: [1]
  #
  # job.wait_until_done!
@@ -466,8 +487,8 @@ module Google
  # bigquery = Google::Cloud::Bigquery.new
  #
  # job = bigquery.query_job "SELECT name FROM " \
- # "`my_dataset.my_table`" \
- # " WHERE id = @id",
+ # "`my_dataset.my_table` " \
+ # "WHERE id = @id",
  # params: { id: 1 }
  #
  # job.wait_until_done!
@@ -477,6 +498,24 @@ module Google
  # end
  # end
  #
+ # @example Query using named query parameters with types:
+ # require "google/cloud/bigquery"
+ #
+ # bigquery = Google::Cloud::Bigquery.new
+ #
+ # job = bigquery.query_job "SELECT name FROM " \
+ # "`my_dataset.my_table` " \
+ # "WHERE id IN UNNEST(@ids)",
+ # params: { ids: [] },
+ # types: { ids: [:INT64] }
+ #
+ # job.wait_until_done!
+ # if !job.failed?
+ # job.data.each do |row|
+ # puts row[:name]
+ # end
+ # end
+ #
  # @example Execute a DDL statement:
  # require "google/cloud/bigquery"
  #
@@ -530,16 +569,17 @@ module Google
  # end
  # end
  #
- def query_job query, params: nil, external: nil, priority: "INTERACTIVE", cache: true, table: nil, create: nil,
- write: nil, dryrun: nil, dataset: nil, project: nil, standard_sql: nil, legacy_sql: nil,
- large_results: nil, flatten: nil, maximum_billing_tier: nil, maximum_bytes_billed: nil,
- job_id: nil, prefix: nil, labels: nil, udfs: nil
+ def query_job query, params: nil, types: nil, external: nil, priority: "INTERACTIVE", cache: true, table: nil,
+ create: nil, write: nil, dryrun: nil, dataset: nil, project: nil, standard_sql: nil,
+ legacy_sql: nil, large_results: nil, flatten: nil, maximum_billing_tier: nil,
+ maximum_bytes_billed: nil, job_id: nil, prefix: nil, labels: nil, udfs: nil
  ensure_service!
- options = { priority: priority, cache: cache, table: table, create: create, write: write, dryrun: dryrun,
- large_results: large_results, flatten: flatten, dataset: dataset,
- project: (project || self.project), legacy_sql: legacy_sql, standard_sql: standard_sql,
- maximum_billing_tier: maximum_billing_tier, maximum_bytes_billed: maximum_bytes_billed,
- external: external, job_id: job_id, prefix: prefix, labels: labels, udfs: udfs, params: params }
+ options = { params: params, types: types, external: external, priority: priority, cache: cache, table: table,
+ create: create, write: write, dryrun: dryrun, dataset: dataset,
+ project: (project || self.project), standard_sql: standard_sql, legacy_sql: legacy_sql,
+ large_results: large_results, flatten: flatten, maximum_billing_tier: maximum_billing_tier,
+ maximum_bytes_billed: maximum_bytes_billed, job_id: job_id, prefix: prefix, labels: labels,
+ udfs: udfs }

  updater = QueryJob::Updater.from_options service, query, options

@@ -556,27 +596,6 @@ module Google
  # as needed to complete the query. When used for executing DDL/DML
  # statements, this method does not return row data.
  #
- # When using standard SQL and passing arguments using `params`, Ruby
- # types are mapped to BigQuery types as follows:
- #
- # | BigQuery | Ruby | Notes |
- # |-------------|----------------|---|
- # | `BOOL` | `true`/`false` | |
- # | `INT64` | `Integer` | |
- # | `FLOAT64` | `Float` | |
- # | `NUMERIC` | `BigDecimal` | Will be rounded to 9 decimal places |
- # | `STRING` | `String` | |
- # | `DATETIME` | `DateTime` | `DATETIME` does not support time zone. |
- # | `DATE` | `Date` | |
- # | `TIMESTAMP` | `Time` | |
- # | `TIME` | `Google::Cloud::BigQuery::Time` | |
- # | `BYTES` | `File`, `IO`, `StringIO`, or similar | |
- # | `ARRAY` | `Array` | Nested arrays, `nil` values are not supported. |
- # | `STRUCT` | `Hash` | Hash keys may be strings or symbols. |
- #
- # See [Data Types](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types)
- # for an overview of each BigQuery data type, including allowed values.
- #
  # The geographic location for the job ("US", "EU", etc.) can be set via
  # {QueryJob::Updater#location=} in a block passed to this method.
  #
@@ -586,13 +605,55 @@ module Google
  # syntax](https://cloud.google.com/bigquery/query-reference), of the
  # query to execute. Example: "SELECT count(f1) FROM
  # [myProjectId:myDatasetId.myTableId]".
- # @param [Array, Hash] params Standard SQL only. Used to pass query
- # arguments when the `query` string contains either positional (`?`)
- # or named (`@myparam`) query parameters. If value passed is an array
- # `["foo"]`, the query must use positional query parameters. If value
- # passed is a hash `{ myparam: "foo" }`, the query must use named
- # query parameters. When set, `legacy_sql` will automatically be set
- # to false and `standard_sql` to true.
+ # @param [Array, Hash] params Standard SQL only. Used to pass query arguments when the `query` string contains
+ # either positional (`?`) or named (`@myparam`) query parameters. If value passed is an array `["foo"]`, the
+ # query must use positional query parameters. If value passed is a hash `{ myparam: "foo" }`, the query must
+ # use named query parameters. When set, `legacy_sql` will automatically be set to false and `standard_sql` to
+ # true.
+ #
+ # Ruby types are mapped to BigQuery types as follows:
+ #
+ # | BigQuery | Ruby | Notes |
+ # |-------------|--------------------------------------|------------------------------------------------|
+ # | `BOOL` | `true`/`false` | |
+ # | `INT64` | `Integer` | |
+ # | `FLOAT64` | `Float` | |
+ # | `NUMERIC` | `BigDecimal` | Will be rounded to 9 decimal places |
+ # | `STRING` | `String` | |
+ # | `DATETIME` | `DateTime` | `DATETIME` does not support time zone. |
+ # | `DATE` | `Date` | |
+ # | `TIMESTAMP` | `Time` | |
+ # | `TIME` | `Google::Cloud::BigQuery::Time` | |
+ # | `BYTES` | `File`, `IO`, `StringIO`, or similar | |
+ # | `ARRAY` | `Array` | Nested arrays, `nil` values are not supported. |
+ # | `STRUCT` | `Hash` | Hash keys may be strings or symbols. |
+ #
+ # See [Data Types](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types) for an overview
+ # of each BigQuery data type, including allowed values.
+ # @param [Array, Hash] types Standard SQL only. Types of the SQL parameters in `params`. It is not always
+ # possible to infer the right SQL type from a value in `params`. In these cases, `types` must be used to
+ # specify the SQL type for these values.
+ #
+ # Must match the value type passed to `params`. This must be an `Array` when the query uses positional query
+ # parameters. This must be a `Hash` when the query uses named query parameters. The values should be BigQuery
+ # type codes from the following list:
+ #
+ # * `:BOOL`
+ # * `:INT64`
+ # * `:FLOAT64`
+ # * `:NUMERIC`
+ # * `:STRING`
+ # * `:DATETIME`
+ # * `:DATE`
+ # * `:TIMESTAMP`
+ # * `:TIME`
+ # * `:BYTES`
+ # * `Array` - Lists are specified by providing the type code in an array. For example, an array of integers
+ # is specified as `[:INT64]`.
+ # * `Hash` - Types for STRUCT values (`Hash` objects) are specified using a `Hash` object, where the keys
+ # match the `params` hash, and the values are the type codes that match the data.
+ #
+ # Types are optional.
  # @param [Hash<String|Symbol, External::DataSource>] external A Hash
  # that represents the mapping of the external tables to the table
  # names used in the SQL query. The hash keys are the table names, and
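The `Hash` bullet above also covers nested STRUCT/ARRAY shapes: the `types` hash simply mirrors the structure of the value in `params`. A hedged sketch (the parameter name and its fields are made up for illustration; no table is needed since the struct is just echoed back):

```ruby
require "google/cloud/bigquery"

bigquery = Google::Cloud::Bigquery.new

# The nested types hash follows the shape of the params value: STRING for
# the name field, ARRAY<FLOAT64> for the (empty, hence uninferable) scores.
data = bigquery.query "SELECT @info AS info",
                      params: { info: { name: "Alice", scores: [] } },
                      types:  { info: { name: :STRING, scores: [:FLOAT64] } }

data.first[:info] #=> { name: "Alice", scores: [] } (expected shape)
```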
@@ -681,7 +742,7 @@ module Google
  # bigquery = Google::Cloud::Bigquery.new
  #
  # data = bigquery.query "SELECT name " \
- # "FROM `my_dataset.my_table`" \
+ # "FROM `my_dataset.my_table` " \
  # "WHERE id = ?",
  # params: [1]
  #
@@ -695,7 +756,7 @@ module Google
  # bigquery = Google::Cloud::Bigquery.new
  #
  # data = bigquery.query "SELECT name " \
- # "FROM `my_dataset.my_table`" \
+ # "FROM `my_dataset.my_table` " \
  # "WHERE id = @id",
  # params: { id: 1 }
  #
@@ -703,6 +764,21 @@ module Google
  # puts row[:name]
  # end
  #
+ # @example Query using named query parameters with types:
+ # require "google/cloud/bigquery"
+ #
+ # bigquery = Google::Cloud::Bigquery.new
+ #
+ # data = bigquery.query "SELECT name FROM " \
+ # "`my_dataset.my_table` " \
+ # "WHERE id IN UNNEST(@ids)",
+ # params: { ids: [] },
+ # types: { ids: [:INT64] }
+ #
+ # data.each do |row|
+ # puts row[:name]
+ # end
+ #
  # @example Execute a DDL statement:
  # require "google/cloud/bigquery"
  #
@@ -744,10 +820,10 @@ module Google
  # puts row[:name]
  # end
  #
- def query query, params: nil, external: nil, max: nil, cache: true, dataset: nil, project: nil,
+ def query query, params: nil, types: nil, external: nil, max: nil, cache: true, dataset: nil, project: nil,
  standard_sql: nil, legacy_sql: nil, &block
- job = query_job query, params: params, external: external, cache: cache, dataset: dataset, project: project,
- standard_sql: standard_sql, legacy_sql: legacy_sql, &block
+ job = query_job query, params: params, types: types, external: external, cache: cache, dataset: dataset,
+ project: project, standard_sql: standard_sql, legacy_sql: legacy_sql, &block
  job.wait_until_done!

  if job.failed?