google-cloud-bigquery 1.21.2
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.yardopts +16 -0
- data/AUTHENTICATION.md +158 -0
- data/CHANGELOG.md +397 -0
- data/CODE_OF_CONDUCT.md +40 -0
- data/CONTRIBUTING.md +188 -0
- data/LICENSE +201 -0
- data/LOGGING.md +27 -0
- data/OVERVIEW.md +463 -0
- data/TROUBLESHOOTING.md +31 -0
- data/lib/google-cloud-bigquery.rb +139 -0
- data/lib/google/cloud/bigquery.rb +145 -0
- data/lib/google/cloud/bigquery/argument.rb +197 -0
- data/lib/google/cloud/bigquery/convert.rb +383 -0
- data/lib/google/cloud/bigquery/copy_job.rb +316 -0
- data/lib/google/cloud/bigquery/credentials.rb +50 -0
- data/lib/google/cloud/bigquery/data.rb +526 -0
- data/lib/google/cloud/bigquery/dataset.rb +2845 -0
- data/lib/google/cloud/bigquery/dataset/access.rb +1021 -0
- data/lib/google/cloud/bigquery/dataset/list.rb +162 -0
- data/lib/google/cloud/bigquery/encryption_configuration.rb +123 -0
- data/lib/google/cloud/bigquery/external.rb +2432 -0
- data/lib/google/cloud/bigquery/extract_job.rb +368 -0
- data/lib/google/cloud/bigquery/insert_response.rb +180 -0
- data/lib/google/cloud/bigquery/job.rb +657 -0
- data/lib/google/cloud/bigquery/job/list.rb +162 -0
- data/lib/google/cloud/bigquery/load_job.rb +1704 -0
- data/lib/google/cloud/bigquery/model.rb +740 -0
- data/lib/google/cloud/bigquery/model/list.rb +164 -0
- data/lib/google/cloud/bigquery/project.rb +1655 -0
- data/lib/google/cloud/bigquery/project/list.rb +161 -0
- data/lib/google/cloud/bigquery/query_job.rb +1695 -0
- data/lib/google/cloud/bigquery/routine.rb +1108 -0
- data/lib/google/cloud/bigquery/routine/list.rb +165 -0
- data/lib/google/cloud/bigquery/schema.rb +564 -0
- data/lib/google/cloud/bigquery/schema/field.rb +668 -0
- data/lib/google/cloud/bigquery/service.rb +589 -0
- data/lib/google/cloud/bigquery/standard_sql.rb +495 -0
- data/lib/google/cloud/bigquery/table.rb +3340 -0
- data/lib/google/cloud/bigquery/table/async_inserter.rb +520 -0
- data/lib/google/cloud/bigquery/table/list.rb +172 -0
- data/lib/google/cloud/bigquery/time.rb +65 -0
- data/lib/google/cloud/bigquery/version.rb +22 -0
- metadata +297 -0
@@ -0,0 +1,383 @@
|
|
1
|
+
# Copyright 2017 Google LLC
|
2
|
+
#
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
4
|
+
# you may not use this file except in compliance with the License.
|
5
|
+
# You may obtain a copy of the License at
|
6
|
+
#
|
7
|
+
# https://www.apache.org/licenses/LICENSE-2.0
|
8
|
+
#
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12
|
+
# See the License for the specific language governing permissions and
|
13
|
+
# limitations under the License.
|
14
|
+
|
15
|
+
|
16
|
+
require "google/apis/bigquery_v2"
|
17
|
+
require "stringio"
|
18
|
+
require "base64"
|
19
|
+
require "bigdecimal"
|
20
|
+
require "time"
|
21
|
+
require "date"
|
22
|
+
|
23
|
+
module Google
|
24
|
+
module Cloud
|
25
|
+
module Bigquery
|
26
|
+
# rubocop:disable Metrics/ModuleLength
|
27
|
+
|
28
|
+
##
|
29
|
+
# @private
|
30
|
+
#
|
31
|
+
# Internal conversion of raw data values to/from Bigquery values
|
32
|
+
#
|
33
|
+
# | BigQuery | Ruby | Notes |
|
34
|
+
# |-------------|----------------|---|
|
35
|
+
# | `BOOL` | `true`/`false` | |
|
36
|
+
# | `INT64` | `Integer` | |
|
37
|
+
# | `FLOAT64` | `Float` | |
|
38
|
+
# | `NUMERIC` | `BigDecimal` | Will be rounded to 9 decimal places |
|
39
|
+
# | `STRING` | `String` | |
|
40
|
+
# | `DATETIME` | `DateTime` | `DATETIME` does not support time zone. |
|
41
|
+
# | `DATE` | `Date` | |
|
42
|
+
# | `TIMESTAMP` | `Time` | |
|
43
|
+
# | `TIME` | `Google::Cloud::BigQuery::Time` | |
|
44
|
+
# | `BYTES` | `File`, `IO`, `StringIO`, or similar | |
|
45
|
+
# | `ARRAY` | `Array` | Nested arrays, `nil` values are not supported. |
|
46
|
+
# | `STRUCT` | `Hash` | Hash keys may be strings or symbols. |
|
47
|
+
module Convert
|
48
|
+
##
|
49
|
+
# @private
|
50
|
+
##
# @private
#
# Formats a collection of raw API rows into an array of Ruby hashes
# keyed by field name.
def self.format_rows rows, fields
  formatted = []
  Array(rows).each do |raw_row|
    # Convert each TableRow to a plain hash so that nested TableCell
    # values can be handled uniformly by format_row.
    formatted << format_row(raw_row.to_h, fields)
  end
  formatted
end
|
56
|
+
|
57
|
+
##
|
58
|
+
# @private
|
59
|
+
##
# @private
#
# Formats one raw row hash (with cells under :f) into a hash mapping
# each field name (as a Symbol) to its converted Ruby value.
def self.format_row row, fields
  fields.zip(row[:f]).each_with_object({}) do |(field, cell), out|
    out[field.name.to_sym] = format_value(cell, field)
  end
end
|
65
|
+
|
66
|
+
# rubocop:disable all
|
67
|
+
|
68
|
+
##
# @private
#
# Converts a single raw cell value (a hash with the payload under :v)
# into the Ruby type that corresponds to the field's BigQuery type.
# Missing or empty cells become nil.
def self.format_value value, field
  # Guard clauses: absent cells always convert to nil.
  return nil if value.nil?
  return nil if value.empty?
  return nil if value[:v].nil?

  inner = value[:v]
  # Repeated fields arrive as arrays of cells; STRUCTs as nested rows.
  return inner.map { |v| format_value v, field } if Array === inner
  return format_row(inner, field.fields) if Hash === inner

  case field.type
  when "STRING" then String inner
  when "INTEGER" then Integer inner
  when "FLOAT"
    # The API spells out non-finite floats as strings.
    case inner
    when "Infinity" then Float::INFINITY
    when "-Infinity" then -Float::INFINITY
    when "NaN" then Float::NAN
    else Float inner
    end
  when "NUMERIC" then BigDecimal inner
  when "BOOLEAN"
    # Anything other than the literal strings "true"/"false" maps to nil.
    if inner == "true"
      true
    elsif inner == "false"
      false
    end
  when "BYTES" then StringIO.new(Base64.decode64(inner))
  when "TIMESTAMP" then ::Time.at Float(inner)
  when "TIME" then Bigquery::Time.new inner
  when "DATETIME" then ::Time.parse("#{inner} UTC").to_datetime
  when "DATE" then Date.parse inner
  else
    # Unknown types are passed through untouched.
    inner
  end
end
|
111
|
+
|
112
|
+
##
|
113
|
+
# @private
|
114
|
+
##
# @private
#
# Builds a Google API QueryParameter from a Ruby value, inferring the
# BigQuery type from the value when no explicit type is given.
def self.to_query_param param, type = nil
  param_type = type || default_query_param_type_for(param)
  gapi_type = to_query_param_type param_type
  gapi_value = to_query_param_value param
  Google::Apis::BigqueryV2::QueryParameter.new parameter_type: gapi_type,
                                               parameter_value: gapi_value
end
|
122
|
+
|
123
|
+
##
|
124
|
+
# @private
|
125
|
+
##
# @private
#
# Builds a Google API QueryParameterValue from a Ruby value: arrays
# become array_values, hashes become struct_values, everything else is
# serialized to a string. A nil value represents SQL NULL.
def self.to_query_param_value value
  return Google::Apis::BigqueryV2::QueryParameterValue.new value: nil if value.nil?

  converted = to_json_value value

  case converted
  when Array
    elements = converted.map { |element| to_query_param_value element }
    Google::Apis::BigqueryV2::QueryParameterValue.new array_values: elements
  when Hash
    members = converted.each_with_object({}) do |(key, member), memo|
      memo[String(key)] = to_query_param_value member
    end
    Google::Apis::BigqueryV2::QueryParameterValue.new struct_values: members
  else
    # Everything else is converted to a string, per the API expectations.
    Google::Apis::BigqueryV2::QueryParameterValue.new value: converted.to_s
  end
end
|
144
|
+
|
145
|
+
##
# @private
#
# Builds a Google API QueryParameterType from a Ruby type description:
# a one-element Array describes an ARRAY, a Hash describes a STRUCT,
# anything else is used as the scalar type name.
def self.to_query_param_type type
  case type
  when Array
    # The array's element type is taken from its first entry.
    Google::Apis::BigqueryV2::QueryParameterType.new(
      type: "ARRAY".freeze,
      array_type: to_query_param_type(type.first)
    )
  when Hash
    struct_types = type.map do |name, field_type|
      Google::Apis::BigqueryV2::QueryParameterType::StructType.new(
        name: String(name),
        type: to_query_param_type(field_type)
      )
    end
    Google::Apis::BigqueryV2::QueryParameterType.new(
      type: "STRUCT".freeze,
      struct_types: struct_types
    )
  else
    Google::Apis::BigqueryV2::QueryParameterType.new(type: type.to_s.freeze)
  end
end
|
165
|
+
|
166
|
+
##
# @private
#
# Infers the BigQuery parameter type for a Ruby value. Arrays map to a
# one-element Array of the element type; hashes map to a Hash of member
# types. Raises ArgumentError when no type can be determined.
def self.default_query_param_type_for param
  raise ArgumentError, "nil params are not supported, must assign optional type" if param.nil?

  case param
  when String, Symbol then :STRING
  when TrueClass, FalseClass then :BOOL
  when Integer then :INT64
  # BigDecimal must be checked before Numeric so decimals map to NUMERIC.
  when BigDecimal then :NUMERIC
  when Numeric then :FLOAT64
  when ::Time then :TIMESTAMP
  when Bigquery::Time then :TIME
  # DateTime must be checked before Date (DateTime is a Date subclass).
  when DateTime then :DATETIME
  when Date then :DATE
  when Array
    raise ArgumentError, "Cannot determine type for empty array values" if param.empty?
    element_types = param.compact.map { |element| default_query_param_type_for element }.compact
    raise ArgumentError, "Cannot determine type for array of nil values" if element_types.empty?
    if element_types.uniq.count > 1
      raise ArgumentError, "Cannot determine type for array of different types of values"
    end
    [element_types.first]
  when Hash
    param.each_with_object({}) do |(key, member), types|
      types[key] = default_query_param_type_for member
    end
  else
    # Readable/rewindable objects (File, StringIO, ...) are sent as BYTES.
    return :BYTES if param.respond_to?(:read) && param.respond_to?(:rewind)
    raise "A query parameter of type #{param.class} is not supported"
  end
end
|
216
|
+
|
217
|
+
##
|
218
|
+
# @private
|
219
|
+
##
# @private
#
# Serializes a Ruby value to its JSON/API wire representation:
# dates and times become formatted strings, NUMERIC becomes a decimal
# string rounded to 9 places, IO-like objects become Base64, and
# arrays/hashes are converted recursively.
def self.to_json_value value
  return value.strftime("%Y-%m-%d %H:%M:%S.%6N") if DateTime === value
  return value.to_s if Date === value
  return value.strftime("%Y-%m-%d %H:%M:%S.%6N%:z") if ::Time === value
  return value.value if Bigquery::Time === value
  if BigDecimal === value
    # NUMERIC: round to a precision of 9 and print without an exponent.
    return value.finite? ? value.round(9).to_s("F") : value.to_s
  end
  # IO-like objects must be checked before Array/Hash to preserve the
  # original dispatch order.
  if value.respond_to?(:read) && value.respond_to?(:rewind)
    value.rewind
    return Base64.strict_encode64(value.read.force_encoding("ASCII-8BIT"))
  end
  return value.map { |element| to_json_value element } if Array === value
  if Hash === value
    return value.each_with_object({}) { |(k, v), memo| memo[k.to_s] = to_json_value v }
  end
  # Integers, floats, booleans, and strings pass through untouched.
  value
end
|
242
|
+
|
243
|
+
# rubocop:enable all
|
244
|
+
|
245
|
+
##
|
246
|
+
# @private
|
247
|
+
##
# @private
#
# Serializes each row hash in the collection for the insertAll API.
def self.to_json_rows rows
  rows.each_with_object([]) do |row, out|
    out << to_json_row(row)
  end
end
|
250
|
+
|
251
|
+
##
|
252
|
+
# @private
|
253
|
+
##
# @private
#
# Serializes one row hash: keys become strings, values are converted
# to their JSON/API wire representation.
def self.to_json_row row
  row.each_with_object({}) do |(key, val), json|
    json[key.to_s] = to_json_value val
  end
end
|
256
|
+
|
257
|
+
##
# @private
#
# Resolves the two mutually-exclusive SQL dialect flags into a single
# legacy_sql boolean: standard_sql wins when given, then legacy_sql,
# and the default is standard SQL (legacy_sql == false).
def self.resolve_legacy_sql standard_sql, legacy_sql
  if standard_sql.nil?
    legacy_sql.nil? ? false : legacy_sql
  else
    !standard_sql
  end
end
|
262
|
+
|
263
|
+
##
|
264
|
+
# @private
|
265
|
+
#
|
266
|
+
# Converts create disposition strings to API values.
|
267
|
+
#
|
268
|
+
# @return [String] API representation of create disposition.
|
269
|
+
##
# @private
#
# Converts create disposition strings to API values.
#
# @return [String] API representation of create disposition. Unknown
#   values are passed through untouched so the API can validate them.
def self.create_disposition str
  mapping = {
    "create_if_needed" => "CREATE_IF_NEEDED",
    "createifneeded"   => "CREATE_IF_NEEDED",
    "if_needed"        => "CREATE_IF_NEEDED",
    "needed"           => "CREATE_IF_NEEDED",
    "create_never"     => "CREATE_NEVER",
    "createnever"      => "CREATE_NEVER",
    "never"            => "CREATE_NEVER"
  }
  mapping.fetch str.to_s.downcase, str
end
|
282
|
+
|
283
|
+
##
|
284
|
+
# @private
|
285
|
+
#
|
286
|
+
# Converts write disposition strings to API values.
|
287
|
+
#
|
288
|
+
# @return [String] API representation of write disposition.
|
289
|
+
##
# @private
#
# Converts write disposition strings to API values.
#
# @return [String] API representation of write disposition. Unknown
#   values are passed through untouched so the API can validate them.
def self.write_disposition str
  mapping = {
    "write_truncate" => "WRITE_TRUNCATE",
    "writetruncate"  => "WRITE_TRUNCATE",
    "truncate"       => "WRITE_TRUNCATE",
    "write_append"   => "WRITE_APPEND",
    "writeappend"    => "WRITE_APPEND",
    "append"         => "WRITE_APPEND",
    "write_empty"    => "WRITE_EMPTY",
    "writeempty"     => "WRITE_EMPTY",
    "empty"          => "WRITE_EMPTY"
  }
  mapping.fetch str.to_s.downcase, str
end
|
304
|
+
|
305
|
+
##
|
306
|
+
# @private
|
307
|
+
#
|
308
|
+
# Converts source format strings to API values.
|
309
|
+
#
|
310
|
+
# @return [String] API representation of source format.
|
311
|
+
##
# @private
#
# Converts source format strings to API values.
#
# @return [String] API representation of source format. Unknown values
#   are passed through untouched so the API can validate them.
def self.source_format format
  mapping = {
    "csv"                    => "CSV",
    "json"                   => "NEWLINE_DELIMITED_JSON",
    "newline_delimited_json" => "NEWLINE_DELIMITED_JSON",
    "avro"                   => "AVRO",
    "orc"                    => "ORC",
    "parquet"                => "PARQUET",
    "datastore"              => "DATASTORE_BACKUP",
    "backup"                 => "DATASTORE_BACKUP",
    "datastore_backup"       => "DATASTORE_BACKUP"
  }
  mapping.fetch format.to_s.downcase, format
end
|
326
|
+
|
327
|
+
##
|
328
|
+
# @private
|
329
|
+
#
|
330
|
+
# Converts file paths into source format by extension.
|
331
|
+
#
|
332
|
+
# @return [String] API representation of source format.
|
333
|
+
##
# @private
#
# Converts a list of file paths into a single source format by
# extension; returns the first format that could be derived.
#
# @return [String, nil] API representation of source format.
def self.derive_source_format_from_list paths
  formats = paths.map { |path| derive_source_format path }
  formats.compact.uniq.first
end
|
338
|
+
|
339
|
+
##
|
340
|
+
# @private
|
341
|
+
#
|
342
|
+
# Converts file path into source format by extension.
|
343
|
+
#
|
344
|
+
# @return [String] API representation of source format.
|
345
|
+
##
# @private
#
# Converts a file path into a source format by its extension.
#
# @return [String, nil] API representation of source format, or nil
#   when the extension is not recognized.
def self.derive_source_format path
  extension_formats = {
    ".csv"         => "CSV",
    ".json"        => "NEWLINE_DELIMITED_JSON",
    ".avro"        => "AVRO",
    ".orc"         => "ORC",
    ".parquet"     => "PARQUET",
    ".backup_info" => "DATASTORE_BACKUP"
  }
  match = extension_formats.find { |ext, _format| path.end_with? ext }
  match && match.last
end
|
354
|
+
|
355
|
+
##
|
356
|
+
# @private
|
357
|
+
#
|
358
|
+
# Converts a primitive time value in milliseconds to a Ruby Time object.
|
359
|
+
#
|
360
|
+
# @return [Time, nil] The Ruby Time object, or nil if the given argument
|
361
|
+
# is nil.
|
362
|
+
##
# @private
#
# Converts a primitive time value in milliseconds to a Ruby Time
# object, using Rational to avoid floating-point drift.
#
# @return [Time, nil] The Ruby Time object, or nil if the given
#   argument is nil.
def self.millis_to_time time_millis
  time_millis ? ::Time.at(Rational(time_millis, 1000)) : nil
end
|
366
|
+
|
367
|
+
##
|
368
|
+
# @private
|
369
|
+
#
|
370
|
+
# Converts a Ruby Time object to a primitive time value in milliseconds.
|
371
|
+
#
|
372
|
+
# @return [Integer, nil] The primitive time value in milliseconds, or
|
373
|
+
# nil if the given argument is nil.
|
374
|
+
##
# @private
#
# Converts a Ruby Time object to a primitive time value in
# milliseconds (whole seconds plus the nanosecond part truncated to
# milliseconds).
#
# @return [Integer, nil] The primitive time value in milliseconds, or
#   nil if the given argument is nil.
def self.time_to_millis time_obj
  time_obj ? time_obj.to_i * 1000 + time_obj.nsec / 1_000_000 : nil
end
|
378
|
+
end
|
379
|
+
|
380
|
+
# rubocop:enable Metrics/ModuleLength
|
381
|
+
end
|
382
|
+
end
|
383
|
+
end
|
@@ -0,0 +1,316 @@
|
|
1
|
+
# Copyright 2015 Google LLC
|
2
|
+
#
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
4
|
+
# you may not use this file except in compliance with the License.
|
5
|
+
# You may obtain a copy of the License at
|
6
|
+
#
|
7
|
+
# https://www.apache.org/licenses/LICENSE-2.0
|
8
|
+
#
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12
|
+
# See the License for the specific language governing permissions and
|
13
|
+
# limitations under the License.
|
14
|
+
|
15
|
+
require "google/cloud/bigquery/encryption_configuration"
|
16
|
+
|
17
|
+
module Google
|
18
|
+
module Cloud
|
19
|
+
module Bigquery
|
20
|
+
##
# # CopyJob
#
# A {Job} subclass representing a copy operation that may be performed on
# a {Table}. A CopyJob instance is created when you call {Table#copy_job}.
#
# @see https://cloud.google.com/bigquery/docs/tables#copy-table Copying
#   an Existing Table
# @see https://cloud.google.com/bigquery/docs/reference/v2/jobs Jobs API
#   reference
#
# @example
#   require "google/cloud/bigquery"
#
#   bigquery = Google::Cloud::Bigquery.new
#   dataset = bigquery.dataset "my_dataset"
#   table = dataset.table "my_table"
#   destination_table = dataset.table "my_destination_table"
#
#   copy_job = table.copy_job destination_table
#
#   copy_job.wait_until_done!
#   copy_job.done? #=> true
#
class CopyJob < Job
  ##
  # The table from which data is copied. This is the table on
  # which {Table#copy_job} was called.
  #
  # @return [Table, nil] A table instance, or nil if the job
  #   configuration has no source table.
  #
  def source
    table = @gapi.configuration.copy.source_table
    return nil unless table
    # NOTE(review): retrieve_table is presumably inherited from Job —
    # it is not defined in this class; confirm against job.rb.
    retrieve_table table.project_id, table.dataset_id, table.table_id
  end

  ##
  # The table to which data is copied.
  #
  # @return [Table, nil] A table instance, or nil if the job
  #   configuration has no destination table.
  #
  def destination
    table = @gapi.configuration.copy.destination_table
    return nil unless table
    retrieve_table table.project_id, table.dataset_id, table.table_id
  end

  ##
  # Checks if the create disposition for the job is `CREATE_IF_NEEDED`,
  # which provides the following behavior: If the table does not exist,
  # the copy operation creates the table. This is the default create
  # disposition for copy jobs.
  #
  # @return [Boolean] `true` when `CREATE_IF_NEEDED`, `false` otherwise.
  #
  def create_if_needed?
    disp = @gapi.configuration.copy.create_disposition
    disp == "CREATE_IF_NEEDED"
  end

  ##
  # Checks if the create disposition for the job is `CREATE_NEVER`, which
  # provides the following behavior: The table must already exist; if it
  # does not, an error is returned in the job result.
  #
  # @return [Boolean] `true` when `CREATE_NEVER`, `false` otherwise.
  #
  def create_never?
    disp = @gapi.configuration.copy.create_disposition
    disp == "CREATE_NEVER"
  end

  ##
  # Checks if the write disposition for the job is `WRITE_TRUNCATE`, which
  # provides the following behavior: If the table already exists, the copy
  # operation overwrites the table data.
  #
  # @return [Boolean] `true` when `WRITE_TRUNCATE`, `false` otherwise.
  #
  def write_truncate?
    disp = @gapi.configuration.copy.write_disposition
    disp == "WRITE_TRUNCATE"
  end

  ##
  # Checks if the write disposition for the job is `WRITE_APPEND`, which
  # provides the following behavior: If the table already exists, the copy
  # operation appends the data to the table.
  #
  # @return [Boolean] `true` when `WRITE_APPEND`, `false` otherwise.
  #
  def write_append?
    disp = @gapi.configuration.copy.write_disposition
    disp == "WRITE_APPEND"
  end

  ##
  # Checks if the write disposition for the job is `WRITE_EMPTY`, which
  # provides the following behavior: If the table already exists and
  # contains data, the job will have an error. This is the default write
  # disposition for copy jobs.
  #
  # @return [Boolean] `true` when `WRITE_EMPTY`, `false` otherwise.
  #
  def write_empty?
    disp = @gapi.configuration.copy.write_disposition
    disp == "WRITE_EMPTY"
  end

  ##
  # The encryption configuration of the destination table.
  #
  # @return [Google::Cloud::BigQuery::EncryptionConfiguration] Custom
  #   encryption configuration (e.g., Cloud KMS keys).
  #
  # @!group Attributes
  def encryption
    EncryptionConfiguration.from_gapi @gapi.configuration.copy.destination_encryption_configuration
  end

  ##
  # Yielded to a block to accumulate changes for an API request.
  # Wraps the raw Google API job object while the caller configures it;
  # job-lifecycle methods (cancel, rerun!, reload!, wait_until_done!)
  # are deliberately disabled because the job does not exist yet.
  class Updater < CopyJob
    ##
    # @private Create an Updater object.
    def initialize gapi
      @gapi = gapi
    end

    ##
    # @private Create an Updater from an options hash.
    #
    # @return [Google::Cloud::Bigquery::CopyJob::Updater] A job
    #   configuration object for setting copy options.
    def self.from_options service, source, target, options
      # Build a job reference (honoring a user-supplied job_id/prefix).
      job_ref = service.job_ref_from options[:job_id], options[:prefix]
      copy_cfg = Google::Apis::BigqueryV2::JobConfigurationTableCopy.new(
        source_table: source,
        destination_table: target
      )
      req = Google::Apis::BigqueryV2::Job.new(
        job_reference: job_ref,
        configuration: Google::Apis::BigqueryV2::JobConfiguration.new(
          copy: copy_cfg,
          dry_run: options[:dryrun]
        )
      )

      updater = CopyJob::Updater.new req
      updater.create = options[:create]
      updater.write = options[:write]
      updater.labels = options[:labels] if options[:labels]
      updater
    end

    ##
    # Sets the geographic location where the job should run. Required
    # except for US and EU.
    #
    # @param [String] value A geographic location, such as "US", "EU" or
    #   "asia-northeast1". Required except for US and EU.
    #
    # @example
    #   require "google/cloud/bigquery"
    #
    #   bigquery = Google::Cloud::Bigquery.new
    #   dataset = bigquery.dataset "my_dataset"
    #   table = dataset.table "my_table"
    #   destination_table = dataset.table "my_destination_table"
    #
    #   copy_job = table.copy_job destination_table do |j|
    #     j.location = "EU"
    #   end
    #
    #   copy_job.wait_until_done!
    #   copy_job.done? #=> true
    #
    # @!group Attributes
    def location= value
      @gapi.job_reference.location = value
      return unless value.nil?

      # Treat assigning value of nil the same as unsetting the value:
      # remove the ivar entirely so it is omitted from the API request.
      unset = @gapi.job_reference.instance_variables.include? :@location
      @gapi.job_reference.remove_instance_variable :@location if unset
    end

    ##
    # Sets the create disposition.
    #
    # This specifies whether the job is allowed to create new tables. The
    # default value is `needed`.
    #
    # The following values are supported:
    #
    # * `needed` - Create the table if it does not exist.
    # * `never` - The table must already exist. A 'notFound' error is
    #   raised if the table does not exist.
    #
    # @param [String] new_create The new create disposition.
    #
    # @!group Attributes
    def create= new_create
      @gapi.configuration.copy.update! create_disposition: Convert.create_disposition(new_create)
    end

    ##
    # Sets the write disposition.
    #
    # This specifies how to handle data already present in the table. The
    # default value is `append`.
    #
    # The following values are supported:
    #
    # * `truncate` - BigQuery overwrites the table data.
    # * `append` - BigQuery appends the data to the table.
    # * `empty` - An error will be returned if the table already contains
    #   data.
    #
    # @param [String] new_write The new write disposition.
    #
    # @!group Attributes
    def write= new_write
      @gapi.configuration.copy.update! write_disposition: Convert.write_disposition(new_write)
    end

    ##
    # Sets the encryption configuration of the destination table.
    #
    # @param [Google::Cloud::BigQuery::EncryptionConfiguration] val
    #   Custom encryption configuration (e.g., Cloud KMS keys).
    #
    # @example
    #   require "google/cloud/bigquery"
    #
    #   bigquery = Google::Cloud::Bigquery.new
    #   dataset = bigquery.dataset "my_dataset"
    #   table = dataset.table "my_table"
    #
    #   key_name = "projects/a/locations/b/keyRings/c/cryptoKeys/d"
    #   encrypt_config = bigquery.encryption kms_key: key_name
    #   job = table.copy_job "my_dataset.new_table" do |job|
    #     job.encryption = encrypt_config
    #   end
    #
    # @!group Attributes
    def encryption= val
      @gapi.configuration.copy.update! destination_encryption_configuration: val.to_gapi
    end

    ##
    # Sets the labels to use for the job.
    #
    # @param [Hash] value A hash of user-provided labels associated with
    #   the job. You can use these to organize and group your jobs. Label
    #   keys and values can be no longer than 63 characters, can only
    #   contain lowercase letters, numeric characters, underscores and
    #   dashes. International characters are allowed. Label values are
    #   optional. Label keys must start with a letter and each label in
    #   the list must have a different key.
    #
    # @!group Attributes
    def labels= value
      @gapi.configuration.update! labels: value
    end

    # Job lifecycle methods are not available until the job has been
    # submitted to the service; each raises to prevent accidental use.
    def cancel
      raise "not implemented in #{self.class}"
    end

    def rerun!
      raise "not implemented in #{self.class}"
    end

    def reload!
      raise "not implemented in #{self.class}"
    end
    alias refresh! reload!

    def wait_until_done!
      raise "not implemented in #{self.class}"
    end

    ##
    # @private Returns the Google API client library version of this job.
    #
    # @return [<Google::Apis::BigqueryV2::Job>] (See
    #   {Google::Apis::BigqueryV2::Job})
    def to_gapi
      @gapi
    end
  end
end
|
314
|
+
end
|
315
|
+
end
|
316
|
+
end
|