google-cloud-bigquery 1.25.0 → 1.30.0

@@ -124,12 +124,12 @@ module Google
  # puts model.model_id
  # end
  #
- def all request_limit: nil
+ def all request_limit: nil, &block
  request_limit = request_limit.to_i if request_limit
  return enum_for :all, request_limit: request_limit unless block_given?
  results = self
  loop do
- results.each { |r| yield r }
+ results.each(&block)
  if request_limit
  request_limit -= 1
  break if request_limit.negative?
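The `all` iterator now forwards the caller's block explicitly instead of re-yielding each record; behavior for callers is unchanged. A minimal usage sketch (the dataset name is hypothetical):

    require "google/cloud/bigquery"

    bigquery = Google::Cloud::Bigquery.new
    dataset  = bigquery.dataset "my_dataset"

    # Iterate every model lazily, issuing at most 10 list requests.
    dataset.models.all(request_limit: 10) do |model|
      puts model.model_id
    end
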
@@ -96,7 +96,8 @@ module Google
  # end
  #
  class Policy
- attr_reader :etag, :bindings
+ attr_reader :etag
+ attr_reader :bindings

  # @private
  def initialize etag, bindings
@@ -56,7 +56,8 @@ module Google
  # @private The Service object.
  attr_accessor :service

- attr_reader :name, :numeric_id
+ attr_reader :name
+ attr_reader :numeric_id

  ##
  # Creates a new Service instance.
@@ -292,35 +293,37 @@ module Google
  #
  # Ruby types are mapped to BigQuery types as follows:
  #
- # | BigQuery | Ruby | Notes |
- # |-------------|--------------------------------------|------------------------------------------------|
- # | `BOOL` | `true`/`false` | |
- # | `INT64` | `Integer` | |
- # | `FLOAT64` | `Float` | |
- # | `NUMERIC` | `BigDecimal` | Will be rounded to 9 decimal places |
- # | `STRING` | `String` | |
- # | `DATETIME` | `DateTime` | `DATETIME` does not support time zone. |
- # | `DATE` | `Date` | |
- # | `TIMESTAMP` | `Time` | |
- # | `TIME` | `Google::Cloud::BigQuery::Time` | |
- # | `BYTES` | `File`, `IO`, `StringIO`, or similar | |
- # | `ARRAY` | `Array` | Nested arrays, `nil` values are not supported. |
- # | `STRUCT` | `Hash` | Hash keys may be strings or symbols. |
+ # | BigQuery | Ruby | Notes |
+ # |--------------|--------------------------------------|----------------------------------------------------|
+ # | `BOOL` | `true`/`false` | |
+ # | `INT64` | `Integer` | |
+ # | `FLOAT64` | `Float` | |
+ # | `NUMERIC` | `BigDecimal` | `BigDecimal` values will be rounded to scale 9. |
+ # | `BIGNUMERIC` | | Query param values must be mapped in `types`. |
+ # | `STRING` | `String` | |
+ # | `DATETIME` | `DateTime` | `DATETIME` does not support time zone. |
+ # | `DATE` | `Date` | |
+ # | `TIMESTAMP` | `Time` | |
+ # | `TIME` | `Google::Cloud::BigQuery::Time` | |
+ # | `BYTES` | `File`, `IO`, `StringIO`, or similar | |
+ # | `ARRAY` | `Array` | Nested arrays, `nil` values are not supported. |
+ # | `STRUCT` | `Hash` | Hash keys may be strings or symbols. |
  #
  # See [Data Types](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types) for an overview
  # of each BigQuery data type, including allowed values.
- # @param [Array, Hash] types Standard SQL only. Types of the SQL parameters in `params`. It is not always to
- # infer the right SQL type from a value in `params`. In these cases, `types` must be used to specify the SQL
- # type for these values.
+ # @param [Array, Hash] types Standard SQL only. Types of the SQL parameters in `params`. It is not always
+ # possible to infer the right SQL type from a value in `params`. In these cases, `types` must be used to
+ # specify the SQL type for these values.
  #
- # Must match the value type passed to `params`. This must be an `Array` when the query uses positional query
- # parameters. This must be an `Hash` when the query uses named query parameters. The values should be BigQuery
- # type codes from the following list:
+ # Arguments must match the value type passed to `params`. This must be an `Array` when the query uses
+ # positional query parameters. This must be an `Hash` when the query uses named query parameters. The values
+ # should be BigQuery type codes from the following list:
  #
  # * `:BOOL`
  # * `:INT64`
  # * `:FLOAT64`
  # * `:NUMERIC`
+ # * `:BIGNUMERIC`
  # * `:STRING`
  # * `:DATETIME`
  # * `:DATE`
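Because `BIGNUMERIC` has no dedicated Ruby type, its parameter values must be declared through `types`. A hedged sketch of a named-parameter query (table and column names are made up), assuming this hunk documents the standard `params`/`types` options of the query methods:

    require "bigdecimal"
    require "google/cloud/bigquery"

    bigquery = Google::Cloud::Bigquery.new

    # The BigDecimal value is sent as BIGNUMERIC because of the explicit type mapping.
    data = bigquery.query "SELECT * FROM `my_dataset.my_table` WHERE amount = @amount",
                          params: { amount: BigDecimal("123456789.123456789012345678") },
                          types:  { amount: :BIGNUMERIC }
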
@@ -637,35 +640,37 @@ module Google
  #
  # Ruby types are mapped to BigQuery types as follows:
  #
- # | BigQuery | Ruby | Notes |
- # |-------------|--------------------------------------|------------------------------------------------|
- # | `BOOL` | `true`/`false` | |
- # | `INT64` | `Integer` | |
- # | `FLOAT64` | `Float` | |
- # | `NUMERIC` | `BigDecimal` | Will be rounded to 9 decimal places |
- # | `STRING` | `String` | |
- # | `DATETIME` | `DateTime` | `DATETIME` does not support time zone. |
- # | `DATE` | `Date` | |
- # | `TIMESTAMP` | `Time` | |
- # | `TIME` | `Google::Cloud::BigQuery::Time` | |
- # | `BYTES` | `File`, `IO`, `StringIO`, or similar | |
- # | `ARRAY` | `Array` | Nested arrays, `nil` values are not supported. |
- # | `STRUCT` | `Hash` | Hash keys may be strings or symbols. |
+ # | BigQuery | Ruby | Notes |
+ # |--------------|--------------------------------------|----------------------------------------------------|
+ # | `BOOL` | `true`/`false` | |
+ # | `INT64` | `Integer` | |
+ # | `FLOAT64` | `Float` | |
+ # | `NUMERIC` | `BigDecimal` | `BigDecimal` values will be rounded to scale 9. |
+ # | `BIGNUMERIC` | | Query param values must be mapped in `types`. |
+ # | `STRING` | `String` | |
+ # | `DATETIME` | `DateTime` | `DATETIME` does not support time zone. |
+ # | `DATE` | `Date` | |
+ # | `TIMESTAMP` | `Time` | |
+ # | `TIME` | `Google::Cloud::BigQuery::Time` | |
+ # | `BYTES` | `File`, `IO`, `StringIO`, or similar | |
+ # | `ARRAY` | `Array` | Nested arrays, `nil` values are not supported. |
+ # | `STRUCT` | `Hash` | Hash keys may be strings or symbols. |
  #
  # See [Data Types](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types) for an overview
  # of each BigQuery data type, including allowed values.
- # @param [Array, Hash] types Standard SQL only. Types of the SQL parameters in `params`. It is not always to
- # infer the right SQL type from a value in `params`. In these cases, `types` must be used to specify the SQL
- # type for these values.
+ # @param [Array, Hash] types Standard SQL only. Types of the SQL parameters in `params`. It is not always
+ # possible to infer the right SQL type from a value in `params`. In these cases, `types` must be used to
+ # specify the SQL type for these values.
  #
- # Must match the value type passed to `params`. This must be an `Array` when the query uses positional query
- # parameters. This must be an `Hash` when the query uses named query parameters. The values should be BigQuery
- # type codes from the following list:
+ # Arguments must match the value type passed to `params`. This must be an `Array` when the query uses
+ # positional query parameters. This must be an `Hash` when the query uses named query parameters. The values
+ # should be BigQuery type codes from the following list:
  #
  # * `:BOOL`
  # * `:INT64`
  # * `:FLOAT64`
  # * `:NUMERIC`
+ # * `:BIGNUMERIC`
  # * `:STRING`
  # * `:DATETIME`
  # * `:DATE`
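For positional parameters the same mapping applies, with `types` given as an `Array` aligned to `params`; a brief sketch under the same assumptions as above:

    require "bigdecimal"
    require "google/cloud/bigquery"

    bigquery = Google::Cloud::Bigquery.new

    # Positional params use "?" placeholders; types line up by position.
    data = bigquery.query "SELECT * FROM `my_dataset.my_table` WHERE amount = ?",
                          params: [BigDecimal("1.5")],
                          types:  [:BIGNUMERIC]
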
@@ -981,8 +986,7 @@ module Google
  # @param [String] description A user-friendly description of the
  # dataset.
  # @param [Integer] expiration The default lifetime of all tables in the
- # dataset, in milliseconds. The minimum value is 3600000 milliseconds
- # (one hour).
+ # dataset, in milliseconds. The minimum value is `3_600_000` (one hour).
  # @param [String] location The geographic location where the dataset
  # should reside. Possible values include `EU` and `US`. The default
  # value is `US`.
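To illustrate the clarified minimum, a sketch assuming these parameters document `Project#create_dataset` (the dataset name is hypothetical):

    require "google/cloud/bigquery"

    bigquery = Google::Cloud::Bigquery.new

    # Tables created in this dataset default to expiring after one hour (3_600_000 ms).
    dataset = bigquery.create_dataset "my_dataset", expiration: 3_600_000
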
@@ -121,12 +121,12 @@ module Google
  # puts project.name
  # end
  #
- def all request_limit: nil
+ def all request_limit: nil, &block
  request_limit = request_limit.to_i if request_limit
  return enum_for :all, request_limit: request_limit unless block_given?
  results = self
  loop do
- results.each { |r| yield r }
+ results.each(&block)
  if request_limit
  request_limit -= 1
  break if request_limit.negative?
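Since `all` still returns an `Enumerator` when no block is given, lazy chaining keeps working after this change; a brief sketch:

    require "google/cloud/bigquery"

    bigquery = Google::Cloud::Bigquery.new

    # Without a block, `all` returns an Enumerator backed by the same pagination loop.
    bigquery.projects.all.first(5).each { |project| puts project.name }
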
@@ -94,8 +94,7 @@ module Google
  # otherwise.
  #
  def batch?
- val = @gapi.configuration.query.priority
- val == "BATCH"
+ @gapi.configuration.query.priority == "BATCH"
  end

  ##
@@ -693,8 +692,11 @@ module Google
  end
  ensure_schema!

- options = { token: token, max: max, start: start }
- data_hash = service.list_tabledata destination_table_dataset_id, destination_table_table_id, options
+ data_hash = service.list_tabledata destination_table_dataset_id,
+ destination_table_table_id,
+ token: token,
+ max: max,
+ start: start
  Data.from_gapi_json data_hash, destination_table_gapi, @gapi, service
  end
  alias query_results data
@@ -705,12 +707,11 @@ module Google
  ##
  # @private Create an Updater object.
  def initialize service, gapi
+ super()
  @service = service
  @gapi = gapi
  end

- # rubocop:disable all
-
  ##
  # @private Create an Updater from an options hash.
  #
@@ -749,8 +750,6 @@ module Google
  updater
  end

- # rubocop:enable all
-
  ##
  # Sets the geographic location where the job should run. Required
  # except for US and EU.
@@ -854,20 +853,21 @@ module Google
  #
  # Ruby types are mapped to BigQuery types as follows:
  #
- # | BigQuery | Ruby | Notes |
- # |-------------|--------------------------------------|------------------------------------------------|
- # | `BOOL` | `true`/`false` | |
- # | `INT64` | `Integer` | |
- # | `FLOAT64` | `Float` | |
- # | `NUMERIC` | `BigDecimal` | Will be rounded to 9 decimal places |
- # | `STRING` | `String` | |
- # | `DATETIME` | `DateTime` | `DATETIME` does not support time zone. |
- # | `DATE` | `Date` | |
- # | `TIMESTAMP` | `Time` | |
- # | `TIME` | `Google::Cloud::BigQuery::Time` | |
- # | `BYTES` | `File`, `IO`, `StringIO`, or similar | |
- # | `ARRAY` | `Array` | Nested arrays, `nil` values are not supported. |
- # | `STRUCT` | `Hash` | Hash keys may be strings or symbols. |
+ # | BigQuery | Ruby | Notes |
+ # |--------------|--------------------------------------|--------------------------------------------------|
+ # | `BOOL` | `true`/`false` | |
+ # | `INT64` | `Integer` | |
+ # | `FLOAT64` | `Float` | |
+ # | `NUMERIC` | `BigDecimal` | `BigDecimal` values will be rounded to scale 9. |
+ # | `BIGNUMERIC` | | Query param values must be mapped in `types`. |
+ # | `STRING` | `String` | |
+ # | `DATETIME` | `DateTime` | `DATETIME` does not support time zone. |
+ # | `DATE` | `Date` | |
+ # | `TIMESTAMP` | `Time` | |
+ # | `TIME` | `Google::Cloud::BigQuery::Time` | |
+ # | `BYTES` | `File`, `IO`, `StringIO`, or similar | |
+ # | `ARRAY` | `Array` | Nested arrays, `nil` values are not supported. |
+ # | `STRUCT` | `Hash` | Hash keys may be strings or symbols. |
  #
  # See [Data Types](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types) for an overview
  # of each BigQuery data type, including allowed values.
@@ -888,35 +888,37 @@ module Google
  #
  # Ruby types are mapped to BigQuery types as follows:
  #
- # | BigQuery | Ruby | Notes |
- # |-------------|--------------------------------------|------------------------------------------------|
- # | `BOOL` | `true`/`false` | |
- # | `INT64` | `Integer` | |
- # | `FLOAT64` | `Float` | |
- # | `NUMERIC` | `BigDecimal` | Will be rounded to 9 decimal places |
- # | `STRING` | `String` | |
- # | `DATETIME` | `DateTime` | `DATETIME` does not support time zone. |
- # | `DATE` | `Date` | |
- # | `TIMESTAMP` | `Time` | |
- # | `TIME` | `Google::Cloud::BigQuery::Time` | |
- # | `BYTES` | `File`, `IO`, `StringIO`, or similar | |
- # | `ARRAY` | `Array` | Nested arrays, `nil` values are not supported. |
- # | `STRUCT` | `Hash` | Hash keys may be strings or symbols. |
+ # | BigQuery | Ruby | Notes |
+ # |--------------|--------------------------------------|--------------------------------------------------|
+ # | `BOOL` | `true`/`false` | |
+ # | `INT64` | `Integer` | |
+ # | `FLOAT64` | `Float` | |
+ # | `NUMERIC` | `BigDecimal` | `BigDecimal` values will be rounded to scale 9. |
+ # | `BIGNUMERIC` | | Query param values must be mapped in `types`. |
+ # | `STRING` | `String` | |
+ # | `DATETIME` | `DateTime` | `DATETIME` does not support time zone. |
+ # | `DATE` | `Date` | |
+ # | `TIMESTAMP` | `Time` | |
+ # | `TIME` | `Google::Cloud::BigQuery::Time` | |
+ # | `BYTES` | `File`, `IO`, `StringIO`, or similar | |
+ # | `ARRAY` | `Array` | Nested arrays, `nil` values are not supported. |
+ # | `STRUCT` | `Hash` | Hash keys may be strings or symbols. |
  #
  # See [Data Types](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types) for an overview
  # of each BigQuery data type, including allowed values.
- # @param [Array, Hash] types Standard SQL only. Types of the SQL parameters in `params`. It is not always to
- # infer the right SQL type from a value in `params`. In these cases, `types` must be used to specify the SQL
- # type for these values.
+ # @param [Array, Hash] types Standard SQL only. Types of the SQL parameters in `params`. It is not always
+ # possible to infer the right SQL type from a value in `params`. In these cases, `types` must be used to
+ # specify the SQL type for these values.
  #
- # Must match the value type passed to `params`. This must be an `Array` when the query uses positional query
- # parameters. This must be an `Hash` when the query uses named query parameters. The values should be
- # BigQuery type codes from the following list:
+ # Arguments must match the value type passed to `params`. This must be an `Array` when the query uses
+ # positional query parameters. This must be an `Hash` when the query uses named query parameters. The values
+ # should be BigQuery type codes from the following list:
  #
  # * `:BOOL`
  # * `:INT64`
  # * `:FLOAT64`
  # * `:NUMERIC`
+ # * `:BIGNUMERIC`
  # * `:STRING`
  # * `:DATETIME`
  # * `:DATE`
@@ -936,13 +938,13 @@ module Google
  raise ArgumentError, "types must use the same format as params" if types.class != params.class

  case params
- when Array then
+ when Array
  @gapi.configuration.query.use_legacy_sql = false
  @gapi.configuration.query.parameter_mode = "POSITIONAL"
  @gapi.configuration.query.query_parameters = params.zip(types).map do |param, type|
  Convert.to_query_param param, type
  end
- when Hash then
+ when Hash
  @gapi.configuration.query.use_legacy_sql = false
  @gapi.configuration.query.parameter_mode = "NAMED"
  @gapi.configuration.query.query_parameters = params.map do |name, param|
@@ -1593,9 +1595,20 @@ module Google
  # end
  #
  class Stage
- attr_reader :compute_ratio_avg, :compute_ratio_max, :id, :name, :read_ratio_avg, :read_ratio_max,
- :records_read, :records_written, :status, :steps, :wait_ratio_avg, :wait_ratio_max,
- :write_ratio_avg, :write_ratio_max
+ attr_reader :compute_ratio_avg
+ attr_reader :compute_ratio_max
+ attr_reader :id
+ attr_reader :name
+ attr_reader :read_ratio_avg
+ attr_reader :read_ratio_max
+ attr_reader :records_read
+ attr_reader :records_written
+ attr_reader :status
+ attr_reader :steps
+ attr_reader :wait_ratio_avg
+ attr_reader :wait_ratio_max
+ attr_reader :write_ratio_avg
+ attr_reader :write_ratio_max

  ##
  # @private Creates a new Stage instance.
@@ -1658,7 +1671,8 @@ module Google
  # end
  #
  class Step
- attr_reader :kind, :substeps
+ attr_reader :kind
+ attr_reader :substeps

  ##
  # @private Creates a new Stage instance.
@@ -603,6 +603,93 @@ module Google
  update_gapi!
  end

+ ###
+ # The JavaScript UDF determinism level. Optional.
+ #
+ # * `DETERMINISTIC` - Deterministic indicates that two calls with the same input to a UDF yield the same output.
+ # If all JavaScript UDFs are `DETERMINISTIC`, the query result is potentially cachable.
+ # * `NOT_DETERMINISTIC` - Not deterministic indicates that the output of the UDF is not guaranteed to yield the
+ # same output each time for a given set of inputs. If any JavaScript UDF is `NOT_DETERMINISTIC`, the query
+ # result is not cacheable.
+ #
+ # Even if a JavaScript UDF is deterministic, many other factors can prevent usage of cached query results.
+ # Example factors include but not limited to: DDL/DML, non-deterministic SQL function calls, update of
+ # referenced tables/views/UDFs or imported JavaScript libraries. SQL UDFs cannot have determinism specified.
+ # Their determinism is automatically determined.
+ #
+ # @return [String, nil] The routine determinism level in upper case, or `nil` if not set or the object is a
+ # reference (see {#reference?}).
+ #
+ # @example
+ # require "google/cloud/bigquery"
+ #
+ # bigquery = Google::Cloud::Bigquery.new
+ # dataset = bigquery.dataset "my_dataset"
+ # routine = dataset.routine "my_routine"
+ #
+ # routine.determinism_level #=> "NOT_DETERMINISTIC"
+ #
+ # @!group Attributes
+ #
+ def determinism_level
+ return nil if reference?
+ ensure_full_data!
+ @gapi.determinism_level
+ end
+
+ ##
+ # Updates the JavaScript UDF determinism level. Optional.
+ #
+ # * `DETERMINISTIC` - Deterministic indicates that two calls with the same input to a UDF yield the same output.
+ # If all JavaScript UDFs are `DETERMINISTIC`, the query result is potentially cachable.
+ # * `NOT_DETERMINISTIC` - Not deterministic indicates that the output of the UDF is not guaranteed to yield the
+ # same output each time for a given set of inputs. If any JavaScript UDF is `NOT_DETERMINISTIC`, the query
+ # result is not cacheable.
+ #
+ # @param [String, nil] new_determinism_level The new routine determinism level in upper case.
+ #
+ # @example
+ # require "google/cloud/bigquery"
+ #
+ # bigquery = Google::Cloud::Bigquery.new
+ # dataset = bigquery.dataset "my_dataset"
+ # routine = dataset.routine "my_routine"
+ #
+ # routine.determinism_level #=> "NOT_DETERMINISTIC"
+ # routine.determinism_level = "DETERMINISTIC"
+ #
+ # @!group Attributes
+ #
+ def determinism_level= new_determinism_level
+ ensure_full_data!
+ @gapi.determinism_level = new_determinism_level
+ update_gapi!
+ end
+
+ ##
+ # Checks if the value of {#determinism_level} is `DETERMINISTIC`. The default is `false`.
+ #
+ # @return [Boolean] `true` when `DETERMINISTIC` and the object is not a reference (see {#reference?}), `false`
+ # otherwise.
+ #
+ # @!group Attributes
+ #
+ def determinism_level_deterministic?
+ @gapi.determinism_level == "DETERMINISTIC"
+ end
+
+ ##
+ # Checks if the value of {#determinism_level} is `NOT_DETERMINISTIC`. The default is `false`.
+ #
+ # @return [Boolean] `true` when `NOT_DETERMINISTIC` and the object is not a reference (see {#reference?}),
+ # `false` otherwise.
+ #
+ # @!group Attributes
+ #
+ def determinism_level_not_deterministic?
+ @gapi.determinism_level == "NOT_DETERMINISTIC"
+ end
+
  ##
  # Updates the routine with changes made in the given block in a single update request. The following attributes
  # may be set: {Updater#routine_type=}, {Updater#language=}, {Updater#arguments=}, {Updater#return_type=},
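The new predicate helpers simply read the same field; a minimal sketch (the dataset and routine names are hypothetical):

    require "google/cloud/bigquery"

    bigquery = Google::Cloud::Bigquery.new
    routine  = bigquery.dataset("my_dataset").routine "my_routine"

    routine.determinism_level                     #=> "NOT_DETERMINISTIC"
    routine.determinism_level_deterministic?      #=> false
    routine.determinism_level_not_deterministic?  #=> true
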
@@ -919,6 +1006,7 @@ module Google
  ##
  # @private Create an Updater object.
  def initialize gapi
+ super()
  @original_gapi = gapi
  @gapi = gapi.dup
  end
@@ -999,7 +1087,9 @@ module Google
  # routine = dataset.routine "my_routine"
  #
  # routine.return_type.type_kind #=> "INT64"
- # routine.return_type = "STRING"
+ # routine.update do |r|
+ # r.return_type = "STRING"
+ # end
  #
  def return_type= new_return_type
  @gapi.return_type = StandardSql::DataType.gapi_from_string_or_data_type new_return_type
@@ -1019,9 +1109,11 @@ module Google
  # dataset = bigquery.dataset "my_dataset"
  # routine = dataset.routine "my_routine"
  #
- # routine.imported_libraries = [
- # "gs://cloud-samples-data/bigquery/udfs/max-value.js"
- # ]
+ # routine.update do |r|
+ # r.imported_libraries = [
+ # "gs://cloud-samples-data/bigquery/udfs/max-value.js"
+ # ]
+ # end
  #
  def imported_libraries= new_imported_libraries
  @gapi.imported_libraries = new_imported_libraries
@@ -1069,12 +1161,43 @@ module Google
  # routine = dataset.routine "my_routine"
  #
  # routine.description #=> "My routine description"
- # routine.description = "My updated routine description"
+ # routine.update do |r|
+ # r.description = "My updated routine description"
+ # end
  #
  def description= new_description
  @gapi.description = new_description
  end

+ ##
+ # Updates the JavaScript UDF determinism level. Optional.
+ #
+ # * `DETERMINISTIC` - Deterministic indicates that two calls with the same input to a UDF yield the same
+ # output. If all JavaScript UDFs are `DETERMINISTIC`, the query result is potentially cachable.
+ # * `NOT_DETERMINISTIC` - Not deterministic indicates that the output of the UDF is not guaranteed to yield
+ # the same output each time for a given set of inputs. If any JavaScript UDF is `NOT_DETERMINISTIC`, the
+ # query result is not cacheable.
+ #
+ # @param [String, nil] new_determinism_level The new routine determinism level in upper case.
+ #
+ # @example
+ # require "google/cloud/bigquery"
+ #
+ # bigquery = Google::Cloud::Bigquery.new
+ # dataset = bigquery.dataset "my_dataset"
+ # routine = dataset.routine "my_routine"
+ #
+ # routine.determinism_level #=> "NOT_DETERMINISTIC"
+ # routine.update do |r|
+ # r.determinism_level = "DETERMINISTIC"
+ # end
+ #
+ # @!group Attributes
+ #
+ def determinism_level= new_determinism_level
+ @gapi.determinism_level = new_determinism_level
+ end
+
  def update
  raise "not implemented in #{self.class}"
  end
@@ -1088,15 +1211,11 @@ module Google
  end
  alias refresh! reload!

- # rubocop:disable Style/CaseEquality
-
  # @private
  def updates?
  !(@gapi === @original_gapi)
  end

- # rubocop:enable Style/CaseEquality
-
  # @private
  def to_gapi
  @gapi