google-cloud-bigquery 1.38.1 → 1.42.0

Sign up to get free protection for your applications and to get access to all the features.
@@ -108,6 +108,10 @@ module Google
108
108
  # @private A Google API Client Table Reference object.
109
109
  attr_reader :reference
110
110
 
111
+ ##
112
+ # @private The metadata view type string.
113
+ attr_accessor :metadata_view
114
+
111
115
  ##
112
116
  # @private Create an empty Table object.
113
117
  def initialize
@@ -154,6 +158,45 @@ module Google
154
158
  @gapi.table_reference.project_id
155
159
  end
156
160
 
161
+ ##
162
+ # The type of the table, such as TABLE, VIEW, or SNAPSHOT.
163
+ #
164
+ # @return [String, nil] Type of the table, or
165
+ # `nil` if the object is a reference (see {#reference?}).
166
+ #
167
+ # @!group Attributes
168
+ #
169
+ def type
170
+ return nil if reference?
171
+ @gapi.type
172
+ end
173
+
174
+ ##
175
+ # Information about the base table and the snapshot time of the table.
176
+ #
177
+ # @return [Google::Apis::BigqueryV2::SnapshotDefinition, nil] Snapshot definition of table snapshot, or
178
+ # `nil` if not snapshot or the object is a reference (see {#reference?}).
179
+ #
180
+ # @!group Attributes
181
+ #
182
+ def snapshot_definition
183
+ return nil if reference?
184
+ @gapi.snapshot_definition
185
+ end
186
+
187
+ ##
188
+ # Information about the base table and the clone time of the table.
189
+ #
190
+ # @return [Google::Apis::BigqueryV2::CloneDefinition, nil] Clone definition of table clone, or
191
+ # `nil` if not clone or the object is a reference (see {#reference?}).
192
+ #
193
+ # @!group Attributes
194
+ #
195
+ def clone_definition
196
+ return nil if reference?
197
+ @gapi.clone_definition
198
+ end
199
+
157
200
  ##
158
201
  # @private The gapi fragment containing the Project ID, Dataset ID, and
159
202
  # Table ID.
@@ -820,6 +863,40 @@ module Google
820
863
  @gapi.type == "VIEW"
821
864
  end
822
865
 
866
+ ##
867
+ # Checks if the table's type is `SNAPSHOT`, indicating that the table
868
+ # represents a BigQuery table snapshot.
869
+ #
870
+ # @see https://cloud.google.com/bigquery/docs/table-snapshots-intro
871
+ #
872
+ # @return [Boolean, nil] `true` when the type is `SNAPSHOT`, `false`
873
+ # otherwise, if the object is a resource (see {#resource?}); `nil` if
874
+ # the object is a reference (see {#reference?}).
875
+ #
876
+ # @!group Attributes
877
+ #
878
+ def snapshot?
879
+ return nil if reference?
880
+ @gapi.type == "SNAPSHOT"
881
+ end
882
+
883
+ ##
884
+ # Checks if the table's type is `CLONE`, indicating that the table
885
+ # represents a BigQuery table clone.
886
+ #
887
+ # @see https://cloud.google.com/bigquery/docs/table-clones-intro
888
+ #
889
+ # @return [Boolean, nil] `true` when the type is `CLONE`, `false`
890
+ # otherwise, if the object is a resource (see {#resource?}); `nil` if
891
+ # the object is a reference (see {#reference?}).
892
+ #
893
+ # @!group Attributes
894
+ #
895
+ def clone?
896
+ return nil if reference?
897
+ !@gapi.clone_definition.nil?
898
+ end
899
+
823
900
  ##
824
901
  # Checks if the table's type is `MATERIALIZED_VIEW`, indicating that
825
902
  # the table represents a BigQuery materialized view.
@@ -1697,9 +1774,16 @@ module Google
1697
1774
  #
1698
1775
  # @!group Data
1699
1776
  #
1700
- def copy_job destination_table, create: nil, write: nil, job_id: nil, prefix: nil, labels: nil, dryrun: nil
1777
+ def copy_job destination_table, create: nil, write: nil, job_id: nil, prefix: nil, labels: nil, dryrun: nil,
1778
+ operation_type: nil
1701
1779
  ensure_service!
1702
- options = { create: create, write: write, dryrun: dryrun, labels: labels, job_id: job_id, prefix: prefix }
1780
+ options = { create: create,
1781
+ write: write,
1782
+ dryrun: dryrun,
1783
+ labels: labels,
1784
+ job_id: job_id,
1785
+ prefix: prefix,
1786
+ operation_type: operation_type }
1703
1787
  updater = CopyJob::Updater.from_options(
1704
1788
  service,
1705
1789
  table_ref,
@@ -1780,10 +1864,195 @@ module Google
1780
1864
  # @!group Data
1781
1865
  #
1782
1866
  def copy destination_table, create: nil, write: nil, &block
1783
- job = copy_job destination_table, create: create, write: write, &block
1784
- job.wait_until_done!
1785
- ensure_job_succeeded! job
1786
- true
1867
+ copy_job_with_operation_type destination_table,
1868
+ create: create,
1869
+ write: write,
1870
+ operation_type: OperationType::COPY,
1871
+ &block
1872
+ end
1873
+
1874
+ ##
1875
+ # Clones the data from the table to another table using a synchronous
1876
+ # method that blocks for a response.
1877
+ # The source and destination tables have the same table type, but only bill for
1878
+ # unique data.
1879
+ # Timeouts and transient errors are generally handled as needed to complete the job.
1880
+ # See also {#copy_job}.
1881
+ #
1882
+ # The geographic location for the job ("US", "EU", etc.) can be set via
1883
+ # {CopyJob::Updater#location=} in a block passed to this method. If the
1884
+ # table is a full resource representation (see {#resource_full?}), the
1885
+ # location of the job will be automatically set to the location of the
1886
+ # table.
1887
+ #
1888
+ # @param [Table, String] destination_table The destination for the
1889
+ # copied data. This can also be a string identifier as specified by
1890
+ # the [Standard SQL Query
1891
+ # Reference](https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#from-clause)
1892
+ # (`project-name.dataset_id.table_id`) or the [Legacy SQL Query
1893
+ # Reference](https://cloud.google.com/bigquery/query-reference#from)
1894
+ # (`project-name:dataset_id.table_id`). This is useful for referencing
1895
+ # tables in other projects and datasets.
1896
+ #
1897
+ # @yield [job] a job configuration object
1898
+ # @yieldparam [Google::Cloud::Bigquery::CopyJob::Updater] job a job
1899
+ # configuration object for setting additional options.
1900
+ #
1901
+ # @return [Boolean] Returns `true` if the copy operation succeeded.
1902
+ #
1903
+ # @example
1904
+ # require "google/cloud/bigquery"
1905
+ #
1906
+ # bigquery = Google::Cloud::Bigquery.new
1907
+ # dataset = bigquery.dataset "my_dataset"
1908
+ # table = dataset.table "my_table"
1909
+ # destination_table = dataset.table "my_destination_table"
1910
+ #
1911
+ # table.clone destination_table
1912
+ #
1913
+ # @example Passing a string identifier for the destination table:
1914
+ # require "google/cloud/bigquery"
1915
+ #
1916
+ # bigquery = Google::Cloud::Bigquery.new
1917
+ # dataset = bigquery.dataset "my_dataset"
1918
+ # table = dataset.table "my_table"
1919
+ #
1920
+ # table.clone "other-project:other_dataset.other_table"
1921
+ #
1922
+ # @!group Data
1923
+ #
1924
+ def clone destination_table, &block
1925
+ copy_job_with_operation_type destination_table,
1926
+ operation_type: OperationType::CLONE,
1927
+ &block
1928
+ end
1929
+
1930
+ ##
1931
+ # Takes a snapshot of the data from the table to another table using a synchronous
1932
+ # method that blocks for a response.
1933
+ # The source table type is TABLE and the destination table type is SNAPSHOT.
1934
+ # Timeouts and transient errors are generally handled as needed to complete the job.
1935
+ # See also {#copy_job}.
1936
+ #
1937
+ # The geographic location for the job ("US", "EU", etc.) can be set via
1938
+ # {CopyJob::Updater#location=} in a block passed to this method. If the
1939
+ # table is a full resource representation (see {#resource_full?}), the
1940
+ # location of the job will be automatically set to the location of the
1941
+ # table.
1942
+ #
1943
+ # @param [Table, String] destination_table The destination for the
1944
+ # copied data. This can also be a string identifier as specified by
1945
+ # the [Standard SQL Query
1946
+ # Reference](https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#from-clause)
1947
+ # (`project-name.dataset_id.table_id`) or the [Legacy SQL Query
1948
+ # Reference](https://cloud.google.com/bigquery/query-reference#from)
1949
+ # (`project-name:dataset_id.table_id`). This is useful for referencing
1950
+ # tables in other projects and datasets.
1951
+ #
1952
+ # @yield [job] a job configuration object
1953
+ # @yieldparam [Google::Cloud::Bigquery::CopyJob::Updater] job a job
1954
+ # configuration object for setting additional options.
1955
+ #
1956
+ # @return [Boolean] Returns `true` if the copy operation succeeded.
1957
+ #
1958
+ # @example
1959
+ # require "google/cloud/bigquery"
1960
+ #
1961
+ # bigquery = Google::Cloud::Bigquery.new
1962
+ # dataset = bigquery.dataset "my_dataset"
1963
+ # table = dataset.table "my_table"
1964
+ # destination_table = dataset.table "my_destination_table"
1965
+ #
1966
+ # table.snapshot destination_table
1967
+ #
1968
+ # @example Passing a string identifier for the destination table:
1969
+ # require "google/cloud/bigquery"
1970
+ #
1971
+ # bigquery = Google::Cloud::Bigquery.new
1972
+ # dataset = bigquery.dataset "my_dataset"
1973
+ # table = dataset.table "my_table"
1974
+ #
1975
+ # table.snapshot "other-project:other_dataset.other_table"
1976
+ #
1977
+ # @!group Data
1978
+ #
1979
+ def snapshot destination_table, &block
1980
+ copy_job_with_operation_type destination_table,
1981
+ operation_type: OperationType::SNAPSHOT,
1982
+ &block
1983
+ end
1984
+
1985
+ ##
1986
+ # Restores the data from the table to another table using a synchronous
1987
+ # method that blocks for a response.
1988
+ # The source table type is SNAPSHOT and the destination table type is TABLE.
1989
+ # Timeouts and transient errors are generally handled as needed to complete the job.
1990
+ # See also {#copy_job}.
1991
+ #
1992
+ # The geographic location for the job ("US", "EU", etc.) can be set via
1993
+ # {CopyJob::Updater#location=} in a block passed to this method. If the
1994
+ # table is a full resource representation (see {#resource_full?}), the
1995
+ # location of the job will be automatically set to the location of the
1996
+ # table.
1997
+ #
1998
+ # @param [Table, String] destination_table The destination for the
1999
+ # copied data. This can also be a string identifier as specified by
2000
+ # the [Standard SQL Query
2001
+ # Reference](https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#from-clause)
2002
+ # (`project-name.dataset_id.table_id`) or the [Legacy SQL Query
2003
+ # Reference](https://cloud.google.com/bigquery/query-reference#from)
2004
+ # (`project-name:dataset_id.table_id`). This is useful for referencing
2005
+ # tables in other projects and datasets.
2006
+ # @param [String] create Specifies whether the job is allowed to create
2007
+ # new tables. The default value is `needed`.
2008
+ #
2009
+ # The following values are supported:
2010
+ #
2011
+ # * `needed` - Create the table if it does not exist.
2012
+ # * `never` - The table must already exist. A 'notFound' error is
2013
+ # raised if the table does not exist.
2014
+ # @param [String] write Specifies how to handle data already present in
2015
+ # the destination table. The default value is `empty`.
2016
+ #
2017
+ # The following values are supported:
2018
+ #
2019
+ # * `truncate` - BigQuery overwrites the table data.
2020
+ # * `append` - BigQuery appends the data to the table.
2021
+ # * `empty` - An error will be returned if the destination table
2022
+ # already contains data.
2023
+ # @yield [job] a job configuration object
2024
+ # @yieldparam [Google::Cloud::Bigquery::CopyJob::Updater] job a job
2025
+ # configuration object for setting additional options.
2026
+ #
2027
+ # @return [Boolean] Returns `true` if the copy operation succeeded.
2028
+ #
2029
+ # @example
2030
+ # require "google/cloud/bigquery"
2031
+ #
2032
+ # bigquery = Google::Cloud::Bigquery.new
2033
+ # dataset = bigquery.dataset "my_dataset"
2034
+ # table = dataset.table "my_table"
2035
+ # destination_table = dataset.table "my_destination_table"
2036
+ #
2037
+ # table.restore destination_table
2038
+ #
2039
+ # @example Passing a string identifier for the destination table:
2040
+ # require "google/cloud/bigquery"
2041
+ #
2042
+ # bigquery = Google::Cloud::Bigquery.new
2043
+ # dataset = bigquery.dataset "my_dataset"
2044
+ # table = dataset.table "my_table"
2045
+ #
2046
+ # table.restore "other-project:other_dataset.other_table"
2047
+ #
2048
+ # @!group Data
2049
+ #
2050
+ def restore destination_table, create: nil, write: nil, &block
2051
+ copy_job_with_operation_type destination_table,
2052
+ create: create,
2053
+ write: write,
2054
+ operation_type: OperationType::RESTORE,
2055
+ &block
1787
2056
  end
1788
2057
 
1789
2058
  ##
@@ -1812,7 +2081,7 @@ module Google
1812
2081
  # The following values are supported:
1813
2082
  #
1814
2083
  # * `csv` - CSV
1815
- # * `json` - [Newline-delimited JSON](http://jsonlines.org/)
2084
+ # * `json` - [Newline-delimited JSON](https://jsonlines.org/)
1816
2085
  # * `avro` - [Avro](http://avro.apache.org/)
1817
2086
  # @param [String] compression The compression type to use for exported
1818
2087
  # files. Possible values include `GZIP` and `NONE`. The default value
@@ -1915,7 +2184,7 @@ module Google
1915
2184
  # The following values are supported:
1916
2185
  #
1917
2186
  # * `csv` - CSV
1918
- # * `json` - [Newline-delimited JSON](http://jsonlines.org/)
2187
+ # * `json` - [Newline-delimited JSON](https://jsonlines.org/)
1919
2188
  # * `avro` - [Avro](http://avro.apache.org/)
1920
2189
  # @param [String] compression The compression type to use for exported
1921
2190
  # files. Possible values include `GZIP` and `NONE`. The default value
@@ -1986,7 +2255,7 @@ module Google
1986
2255
  # The following values are supported:
1987
2256
  #
1988
2257
  # * `csv` - CSV
1989
- # * `json` - [Newline-delimited JSON](http://jsonlines.org/)
2258
+ # * `json` - [Newline-delimited JSON](https://jsonlines.org/)
1990
2259
  # * `avro` - [Avro](http://avro.apache.org/)
1991
2260
  # * `orc` - [ORC](https://cloud.google.com/bigquery/docs/loading-data-cloud-storage-orc)
1992
2261
  # * `parquet` - [Parquet](https://parquet.apache.org/)
@@ -2199,7 +2468,7 @@ module Google
2199
2468
  # The following values are supported:
2200
2469
  #
2201
2470
  # * `csv` - CSV
2202
- # * `json` - [Newline-delimited JSON](http://jsonlines.org/)
2471
+ # * `json` - [Newline-delimited JSON](https://jsonlines.org/)
2203
2472
  # * `avro` - [Avro](http://avro.apache.org/)
2204
2473
  # * `orc` - [ORC](https://cloud.google.com/bigquery/docs/loading-data-cloud-storage-orc)
2205
2474
  # * `parquet` - [Parquet](https://parquet.apache.org/)
@@ -2571,7 +2840,7 @@ module Google
2571
2840
  #
2572
2841
  def reload!
2573
2842
  ensure_service!
2574
- @gapi = service.get_table dataset_id, table_id
2843
+ @gapi = service.get_table dataset_id, table_id, metadata_view: metadata_view
2575
2844
  @reference = nil
2576
2845
  @exists = nil
2577
2846
  self
@@ -2705,10 +2974,11 @@ module Google
2705
2974
 
2706
2975
  ##
2707
2976
  # @private New Table from a Google API Client object.
2708
- def self.from_gapi gapi, service
2977
+ def self.from_gapi gapi, service, metadata_view: nil
2709
2978
  new.tap do |f|
2710
2979
  f.gapi = gapi
2711
2980
  f.service = service
2981
+ f.metadata_view = metadata_view
2712
2982
  end
2713
2983
  end
2714
2984
 
@@ -2739,6 +3009,17 @@ module Google
2739
3009
 
2740
3010
  protected
2741
3011
 
3012
+ def copy_job_with_operation_type destination_table, create: nil, write: nil, operation_type: nil, &block
3013
+ job = copy_job destination_table,
3014
+ create: create,
3015
+ write: write,
3016
+ operation_type: operation_type,
3017
+ &block
3018
+ job.wait_until_done!
3019
+ ensure_job_succeeded! job
3020
+ true
3021
+ end
3022
+
2742
3023
  ##
2743
3024
  # Raise an error unless an active service is available.
2744
3025
  def ensure_service!
@@ -2766,7 +3047,7 @@ module Google
2766
3047
  def patch_gapi! *attributes
2767
3048
  return if attributes.empty?
2768
3049
  ensure_service!
2769
- patch_args = Hash[attributes.map { |attr| [attr, @gapi.send(attr)] }]
3050
+ patch_args = attributes.to_h { |attr| [attr, @gapi.send(attr)] }
2770
3051
  patch_gapi = Google::Apis::BigqueryV2::Table.new(**patch_args)
2771
3052
  patch_gapi.etag = etag if etag
2772
3053
  @gapi = service.patch_table dataset_id, table_id, patch_gapi
@@ -3706,8 +3987,6 @@ module Google
3706
3987
  schema.record name, description: description, mode: mode, &block
3707
3988
  end
3708
3989
 
3709
- # rubocop:disable Style/MethodDefParentheses
3710
-
3711
3990
  ##
3712
3991
  # @raise [RuntimeError] not implemented
3713
3992
  def data(*)
@@ -3793,8 +4072,6 @@ module Google
3793
4072
  end
3794
4073
  alias refresh! reload!
3795
4074
 
3796
- # rubocop:enable Style/MethodDefParentheses
3797
-
3798
4075
  ##
3799
4076
  # @private Make sure any access changes are saved
3800
4077
  def check_for_mutated_schema!
@@ -16,7 +16,7 @@
16
16
  module Google
17
17
  module Cloud
18
18
  module Bigquery
19
- VERSION = "1.38.1".freeze
19
+ VERSION = "1.42.0".freeze
20
20
  end
21
21
  end
22
22
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: google-cloud-bigquery
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.38.1
4
+ version: 1.42.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Mike Moore
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2022-01-13 00:00:00.000000000 Z
12
+ date: 2023-01-16 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: concurrent-ruby
@@ -121,28 +121,28 @@ dependencies:
121
121
  requirements:
122
122
  - - "~>"
123
123
  - !ruby/object:Gem::Version
124
- version: 1.25.1
124
+ version: 1.26.1
125
125
  type: :development
126
126
  prerelease: false
127
127
  version_requirements: !ruby/object:Gem::Requirement
128
128
  requirements:
129
129
  - - "~>"
130
130
  - !ruby/object:Gem::Version
131
- version: 1.25.1
131
+ version: 1.26.1
132
132
  - !ruby/object:Gem::Dependency
133
133
  name: minitest
134
134
  requirement: !ruby/object:Gem::Requirement
135
135
  requirements:
136
136
  - - "~>"
137
137
  - !ruby/object:Gem::Version
138
- version: '5.14'
138
+ version: '5.16'
139
139
  type: :development
140
140
  prerelease: false
141
141
  version_requirements: !ruby/object:Gem::Requirement
142
142
  requirements:
143
143
  - - "~>"
144
144
  - !ruby/object:Gem::Version
145
- version: '5.14'
145
+ version: '5.16'
146
146
  - !ruby/object:Gem::Dependency
147
147
  name: minitest-autotest
148
148
  requirement: !ruby/object:Gem::Requirement
@@ -268,6 +268,7 @@ files:
268
268
  - lib/google/cloud/bigquery/dataset.rb
269
269
  - lib/google/cloud/bigquery/dataset/access.rb
270
270
  - lib/google/cloud/bigquery/dataset/list.rb
271
+ - lib/google/cloud/bigquery/dataset/tag.rb
271
272
  - lib/google/cloud/bigquery/encryption_configuration.rb
272
273
  - lib/google/cloud/bigquery/external.rb
273
274
  - lib/google/cloud/bigquery/external/avro_source.rb
@@ -313,14 +314,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
313
314
  requirements:
314
315
  - - ">="
315
316
  - !ruby/object:Gem::Version
316
- version: '2.5'
317
+ version: '2.6'
317
318
  required_rubygems_version: !ruby/object:Gem::Requirement
318
319
  requirements:
319
320
  - - ">="
320
321
  - !ruby/object:Gem::Version
321
322
  version: '0'
322
323
  requirements: []
323
- rubygems_version: 3.3.5
324
+ rubygems_version: 3.4.2
324
325
  signing_key:
325
326
  specification_version: 4
326
327
  summary: API Client library for Google BigQuery