google-cloud-bigquery 1.18.0 → 1.21.1

@@ -0,0 +1,165 @@
+ # Copyright 2020 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+
+ require "delegate"
+
+ module Google
+   module Cloud
+     module Bigquery
+       class Routine
+         ##
+         # Routine::List is a special case Array with additional values.
+         class List < DelegateClass(::Array)
+           ##
+           # If not empty, indicates that there are more records that match
+           # the request and this value should be passed to continue.
+           attr_accessor :token
+
+           ##
+           # @private Create a new Routine::List with an array of routines.
+           def initialize arr = []
+             super arr
+           end
+
+           ##
+           # Whether there is a next page of routines.
+           #
+           # @return [Boolean]
+           #
+           # @example
+           #   require "google/cloud/bigquery"
+           #
+           #   bigquery = Google::Cloud::Bigquery.new
+           #   dataset = bigquery.dataset "my_dataset"
+           #
+           #   routines = dataset.routines
+           #   if routines.next?
+           #     next_routines = routines.next
+           #   end
+           #
+           def next?
+             !token.nil?
+           end
+
+           ##
+           # Retrieve the next page of routines.
+           #
+           # @return [Routine::List]
+           #
+           # @example
+           #   require "google/cloud/bigquery"
+           #
+           #   bigquery = Google::Cloud::Bigquery.new
+           #   dataset = bigquery.dataset "my_dataset"
+           #
+           #   routines = dataset.routines
+           #   if routines.next?
+           #     next_routines = routines.next
+           #   end
+           #
+           def next
+             return nil unless next?
+             ensure_service!
+             gapi = @service.list_routines @dataset_id, token: token, max: @max, filter: @filter
+             self.class.from_gapi gapi, @service, @dataset_id, @max, filter: @filter
+           end
+
+           ##
+           # Retrieves remaining results by repeatedly invoking {#next} until
+           # {#next?} returns `false`. Calls the given block once for each
+           # result, which is passed as the argument to the block.
+           #
+           # An Enumerator is returned if no block is given.
+           #
+           # This method will make repeated API calls until all remaining results
+           # are retrieved. (Unlike `#each`, for example, which merely iterates
+           # over the results returned by a single API call.) Use with caution.
+           #
+           # @param [Integer] request_limit The upper limit of API requests to
+           #   make to load all routines. Default is no limit.
+           # @yield [routine] The block for accessing each routine.
+           # @yieldparam [Routine] routine The routine object.
+           #
+           # @return [Enumerator]
+           #
+           # @example Iterating each result by passing a block:
+           #   require "google/cloud/bigquery"
+           #
+           #   bigquery = Google::Cloud::Bigquery.new
+           #   dataset = bigquery.dataset "my_dataset"
+           #
+           #   dataset.routines.all do |routine|
+           #     puts routine.routine_id
+           #   end
+           #
+           # @example Using the enumerator by not passing a block:
+           #   require "google/cloud/bigquery"
+           #
+           #   bigquery = Google::Cloud::Bigquery.new
+           #   dataset = bigquery.dataset "my_dataset"
+           #
+           #   all_names = dataset.routines.all.map do |routine|
+           #     routine.routine_id
+           #   end
+           #
+           # @example Limit the number of API requests made:
+           #   require "google/cloud/bigquery"
+           #
+           #   bigquery = Google::Cloud::Bigquery.new
+           #   dataset = bigquery.dataset "my_dataset"
+           #
+           #   dataset.routines.all(request_limit: 10) do |routine|
+           #     puts routine.routine_id
+           #   end
+           #
+           def all request_limit: nil
+             request_limit = request_limit.to_i if request_limit
+             return enum_for :all, request_limit: request_limit unless block_given?
+             results = self
+             loop do
+               results.each { |r| yield r }
+               if request_limit
+                 request_limit -= 1
+                 break if request_limit.negative?
+               end
+               break unless results.next?
+               results = results.next
+             end
+           end
+
+           ##
+           # @private New Routine::List from a response object.
+           def self.from_gapi gapi_list, service, dataset_id = nil, max = nil, filter: nil
+             routines = List.new(Array(gapi_list.routines).map { |gapi| Routine.from_gapi gapi, service })
+             routines.instance_variable_set :@token, gapi_list.next_page_token
+             routines.instance_variable_set :@service, service
+             routines.instance_variable_set :@dataset_id, dataset_id
+             routines.instance_variable_set :@max, max
+             routines.instance_variable_set :@filter, filter
+             routines
+           end
+
+           protected
+
+           ##
+           # Raise an error unless an active service is available.
+           def ensure_service!
+             raise "Must have active connection" unless @service
+           end
+         end
+       end
+     end
+   end
+ end
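
The new Routine::List pages like the other list classes in this gem. A minimal paging sketch, pieced together from the doc examples above (it assumes a dataset that already contains routines):

require "google/cloud/bigquery"

bigquery = Google::Cloud::Bigquery.new
dataset  = bigquery.dataset "my_dataset"

# Enumerate every routine, letting #all issue follow-up API calls as needed.
dataset.routines.all(request_limit: 10) do |routine|
  puts routine.routine_id
end

# Or page manually with #next?/#next, as in the @example blocks above.
routines = dataset.routines
while routines
  routines.each { |routine| puts routine.routine_id }
  routines = routines.next? ? routines.next : nil
end
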
@@ -232,8 +232,8 @@ module Google
  #
  #   bigquery = Google::Cloud::Bigquery.new
  #   dataset = bigquery.dataset "my_dataset"
- #   table = dataset.table "my_table" do |table|
- #     table.schema.load File.read("path/to/schema.json")
+ #   table = dataset.table "my_table" do |t|
+ #     t.schema.load File.read("path/to/schema.json")
  #   end
  #
  def load source
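
For reference, the schema.json consumed by `schema.load` in the example above is assumed to be the standard BigQuery schema JSON layout: an array of column definitions with name, type, and mode (the same shape the bq CLI prints with --schema). A hypothetical file's contents, inspected with plain Ruby:

require "json"

# Hypothetical path/to/schema.json contents (standard BigQuery schema layout).
schema_json = <<~JSON
  [
    { "name": "first_name", "type": "STRING",  "mode": "REQUIRED" },
    { "name": "age",        "type": "INTEGER", "mode": "NULLABLE" }
  ]
JSON

JSON.parse(schema_json).each { |field| puts "#{field['name']}: #{field['type']}" }
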
@@ -78,12 +78,10 @@ module Google
  ##
  # Lists all datasets in the specified project to which you have
  # been granted the READER dataset role.
- def list_datasets options = {}
+ def list_datasets all: nil, filter: nil, max: nil, token: nil
    # The list operation is considered idempotent
    execute backoff: true do
-     service.list_datasets \
-       @project, all: options[:all], filter: options[:filter],
-       max_results: options[:max], page_token: options[:token]
+     service.list_datasets @project, all: all, filter: filter, max_results: max, page_token: token
    end
  end
 
@@ -133,10 +131,10 @@ module Google
  ##
  # Lists all tables in the specified dataset.
  # Requires the READER dataset role.
- def list_tables dataset_id, options = {}
+ def list_tables dataset_id, max: nil, token: nil
    # The list operation is considered idempotent
    execute backoff: true do
-     service.list_tables @project, dataset_id, max_results: options[:max], page_token: options[:token]
+     service.list_tables @project, dataset_id, max_results: max, page_token: token
    end
  end
 
@@ -190,38 +188,45 @@ module Google
 
  ##
  # Retrieves data from the table.
- def list_tabledata dataset_id, table_id, options = {}
+ def list_tabledata dataset_id, table_id, max: nil, token: nil, start: nil
    # The list operation is considered idempotent
    execute backoff: true do
      json_txt = service.list_table_data \
        @project, dataset_id, table_id,
-       max_results: options.delete(:max),
-       page_token: options.delete(:token),
-       start_index: options.delete(:start),
+       max_results: max,
+       page_token: token,
+       start_index: start,
        options: { skip_deserialization: true }
      JSON.parse json_txt, symbolize_names: true
    end
  end
 
- def insert_tabledata dataset_id, table_id, rows, options = {}
+ def insert_tabledata dataset_id, table_id, rows, insert_ids: nil, ignore_unknown: nil, skip_invalid: nil
    json_rows = Array(rows).map { |row| Convert.to_json_row row }
-   insert_tabledata_json_rows dataset_id, table_id, json_rows, options
+   insert_tabledata_json_rows dataset_id, table_id, json_rows, insert_ids: insert_ids,
+                                                               ignore_unknown: ignore_unknown,
+                                                               skip_invalid: skip_invalid
  end
 
- def insert_tabledata_json_rows dataset_id, table_id, json_rows, options = {}
-   rows_and_ids = Array(json_rows).zip Array(options[:insert_ids])
+ def insert_tabledata_json_rows dataset_id, table_id, json_rows, insert_ids: nil, ignore_unknown: nil,
+                                skip_invalid: nil
+   rows_and_ids = Array(json_rows).zip Array(insert_ids)
    insert_rows = rows_and_ids.map do |json_row, insert_id|
-     insert_id ||= SecureRandom.uuid
-     {
-       insertId: insert_id,
-       json: json_row
-     }
+     if insert_id == :skip
+       { json: json_row }
+     else
+       insert_id ||= SecureRandom.uuid
+       {
+         insertId: insert_id,
+         json: json_row
+       }
+     end
    end
 
    insert_req = {
      rows: insert_rows,
-     ignoreUnknownValues: options[:ignore_unknown],
-     skipInvalidRows: options[:skip_invalid]
+     ignoreUnknownValues: ignore_unknown,
+     skipInvalidRows: skip_invalid
    }.to_json
 
    # The insertAll with insertId operation is considered idempotent
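
The `:skip` branch above is what lets a caller opt a row out of best-effort de-duplication: a row paired with `:skip` is sent without an `insertId`, while `nil` still gets a random UUID. A standalone sketch of that mapping (plain Ruby mirroring the diff, not the library itself; the row data is made up):

require "securerandom"
require "json"

rows       = [{ name: "Alice" }, { name: "Bob" }]
insert_ids = [:skip, nil] # :skip disables best-effort dedup for that row

insert_rows = rows.zip(insert_ids).map do |row, insert_id|
  if insert_id == :skip
    { json: row }                                             # no insertId sent
  else
    { insertId: insert_id || SecureRandom.uuid, json: row }   # dedup key per row
  end
end

puts({ rows: insert_rows, skipInvalidRows: false }.to_json)
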
@@ -281,17 +286,69 @@ module Google
    execute { service.delete_model @project, dataset_id, model_id }
  end
 
+ ##
+ # Creates a new routine in the dataset.
+ def insert_routine dataset_id, new_routine_gapi
+   execute { service.insert_routine @project, dataset_id, new_routine_gapi }
+ end
+
+ ##
+ # Lists all routines in the specified dataset.
+ # Requires the READER dataset role.
+ # Unless readMask is set in the request, only the following fields are populated:
+ # etag, projectId, datasetId, routineId, routineType, creationTime, lastModifiedTime, and language.
+ def list_routines dataset_id, max: nil, token: nil, filter: nil
+   # The list operation is considered idempotent
+   execute backoff: true do
+     service.list_routines @project, dataset_id, max_results: max,
+                                                 page_token: token,
+                                                 filter: filter
+   end
+ end
+
+ ##
+ # Gets the specified routine resource by routine ID.
+ def get_routine dataset_id, routine_id
+   # The get operation is considered idempotent
+   execute backoff: true do
+     service.get_routine @project, dataset_id, routine_id
+   end
+ end
+
+ ##
+ # Updates information in an existing routine, replacing the entire routine resource.
+ def update_routine dataset_id, routine_id, new_routine_gapi
+   update_with_backoff = false
+   options = {}
+   if new_routine_gapi.etag
+     options[:header] = { "If-Match" => new_routine_gapi.etag }
+     # The update with etag operation is considered idempotent
+     update_with_backoff = true
+   end
+   execute backoff: update_with_backoff do
+     service.update_routine @project, dataset_id, routine_id, new_routine_gapi, options: options
+   end
+ end
+
+ ##
+ # Deletes the routine specified by routine_id from the dataset.
+ def delete_routine dataset_id, routine_id
+   execute { service.delete_routine @project, dataset_id, routine_id }
+ end
+
  ##
  # Lists all jobs in the specified project to which you have
  # been granted the READER job role.
- def list_jobs options = {}
+ def list_jobs all: nil, token: nil, max: nil, filter: nil, min_created_at: nil, max_created_at: nil,
+               parent_job_id: nil
    # The list operation is considered idempotent
-   min_creation_time = Convert.time_to_millis options[:min_created_at]
-   max_creation_time = Convert.time_to_millis options[:max_created_at]
+   min_creation_time = Convert.time_to_millis min_created_at
+   max_creation_time = Convert.time_to_millis max_created_at
    execute backoff: true do
-     service.list_jobs @project, all_users: options[:all], max_results: options[:max],
-                       page_token: options[:token], projection: "full", state_filter: options[:filter],
-                       min_creation_time: min_creation_time, max_creation_time: max_creation_time
+     service.list_jobs @project, all_users: all, max_results: max,
+                       page_token: token, projection: "full", state_filter: filter,
+                       min_creation_time: min_creation_time, max_creation_time: max_creation_time,
+                       parent_job_id: parent_job_id
    end
  end
 
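
These service methods back the routine CRUD added at the public API level in this release. A hedged sketch of typical calls; `Dataset#create_routine` and `Dataset#routines` follow the doc examples elsewhere in this diff, while `Dataset#routine` and `Routine#delete` are assumed to follow the gem's usual table/model conventions:

require "google/cloud/bigquery"

bigquery = Google::Cloud::Bigquery.new
dataset  = bigquery.dataset "my_dataset"

routine = dataset.create_routine "my_routine" do |r|
  r.routine_type = "SCALAR_FUNCTION"
  r.language = "SQL"
  r.arguments = [Google::Cloud::Bigquery::Argument.new(name: "x", data_type: "INT64")]
  r.body = "x * 3"
end

dataset.routines.all { |r| puts r.routine_id } # paged via Service#list_routines
routine = dataset.routine "my_routine"         # Service#get_routine
routine.delete                                 # Service#delete_routine
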
@@ -329,15 +386,15 @@ module Google
 
  ##
  # Returns the query data for the job
- def job_query_results job_id, options = {}
+ def job_query_results job_id, location: nil, max: nil, token: nil, start: nil, timeout: nil
    # The get operation is considered idempotent
    execute backoff: true do
      service.get_job_query_results @project, job_id,
-       location: options.delete(:location),
-       max_results: options.delete(:max),
-       page_token: options.delete(:token),
-       start_index: options.delete(:start),
-       timeout_ms: options.delete(:timeout)
+       location: location,
+       max_results: max,
+       page_token: token,
+       start_index: start,
+       timeout_ms: timeout
    end
  end
 
@@ -405,9 +462,9 @@ module Google
 
  ##
  # Lists all projects to which you have been granted any project role.
- def list_projects options = {}
+ def list_projects max: nil, token: nil
    execute backoff: true do
-     service.list_projects max_results: options[:max], page_token: options[:token]
+     service.list_projects max_results: max, page_token: token
    end
  end
 
@@ -485,10 +542,10 @@ module Google
    sleep delay
  end
 
- def initialize options = {}
-   @retries = (options[:retries] || Backoff.retries).to_i
-   @reasons = (options[:reasons] || Backoff.reasons).to_a
-   @backoff = options[:backoff] || Backoff.backoff
+ def initialize retries: nil, reasons: nil, backoff: nil
+   @retries = (retries || Backoff.retries).to_i
+   @reasons = (reasons || Backoff.reasons).to_a
+   @backoff = backoff || Backoff.backoff
  end
 
  def execute
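
The Backoff defaults themselves still come from the client configuration rather than from this internal class. A hedged sketch of tuning them, assuming the gem's standard `retries` and `timeout` configuration options:

require "google/cloud/bigquery"

# Raise the retry count used by the idempotent calls wrapped in execute(backoff: true).
Google::Cloud::Bigquery.configure do |config|
  config.retries = 5   # number of retry attempts for retriable failures
  config.timeout = 120 # request timeout in seconds
end

bigquery = Google::Cloud::Bigquery.new
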
@@ -18,89 +18,226 @@ module Google
  module Bigquery
    ##
    # BigQuery standard SQL is compliant with the SQL 2011 standard and has
-   # extensions that support querying nested and repeated data.
+   # extensions that support querying nested and repeated data. See {Routine} and {Argument}.
+   #
+   # @example
+   #   require "google/cloud/bigquery"
+   #
+   #   bigquery = Google::Cloud::Bigquery.new
+   #   dataset = bigquery.dataset "my_dataset"
+   #   routine = dataset.create_routine "my_routine" do |r|
+   #     r.routine_type = "SCALAR_FUNCTION"
+   #     r.language = :SQL
+   #     r.body = "(SELECT SUM(IF(elem.name = \"foo\",elem.val,null)) FROM UNNEST(arr) AS elem)"
+   #     r.arguments = [
+   #       Google::Cloud::Bigquery::Argument.new(
+   #         name: "arr",
+   #         argument_kind: "FIXED_TYPE",
+   #         data_type: Google::Cloud::Bigquery::StandardSql::DataType.new(
+   #           type_kind: "ARRAY",
+   #           array_element_type: Google::Cloud::Bigquery::StandardSql::DataType.new(
+   #             type_kind: "STRUCT",
+   #             struct_type: Google::Cloud::Bigquery::StandardSql::StructType.new(
+   #               fields: [
+   #                 Google::Cloud::Bigquery::StandardSql::Field.new(
+   #                   name: "name",
+   #                   type: Google::Cloud::Bigquery::StandardSql::DataType.new(type_kind: "STRING")
+   #                 ),
+   #                 Google::Cloud::Bigquery::StandardSql::Field.new(
+   #                   name: "val",
+   #                   type: Google::Cloud::Bigquery::StandardSql::DataType.new(type_kind: "INT64")
+   #                 )
+   #               ]
+   #             )
+   #           )
+   #         )
+   #       )
+   #     ]
+   #   end
+   #
    module StandardSql
      ##
-     # A field or a column.
+     # A field or a column. See {Routine} and {Argument}.
+     #
+     # @example
+     #   require "google/cloud/bigquery"
+     #
+     #   bigquery = Google::Cloud::Bigquery.new
+     #   dataset = bigquery.dataset "my_dataset"
+     #   routine = dataset.create_routine "my_routine" do |r|
+     #     r.routine_type = "SCALAR_FUNCTION"
+     #     r.language = :SQL
+     #     r.body = "(SELECT SUM(IF(elem.name = \"foo\",elem.val,null)) FROM UNNEST(arr) AS elem)"
+     #     r.arguments = [
+     #       Google::Cloud::Bigquery::Argument.new(
+     #         name: "arr",
+     #         argument_kind: "FIXED_TYPE",
+     #         data_type: Google::Cloud::Bigquery::StandardSql::DataType.new(
+     #           type_kind: "ARRAY",
+     #           array_element_type: Google::Cloud::Bigquery::StandardSql::DataType.new(
+     #             type_kind: "STRUCT",
+     #             struct_type: Google::Cloud::Bigquery::StandardSql::StructType.new(
+     #               fields: [
+     #                 Google::Cloud::Bigquery::StandardSql::Field.new(
+     #                   name: "name",
+     #                   type: Google::Cloud::Bigquery::StandardSql::DataType.new(type_kind: "STRING")
+     #                 ),
+     #                 Google::Cloud::Bigquery::StandardSql::Field.new(
+     #                   name: "val",
+     #                   type: Google::Cloud::Bigquery::StandardSql::DataType.new(type_kind: "INT64")
+     #                 )
+     #               ]
+     #             )
+     #           )
+     #         )
+     #       )
+     #     ]
+     #   end
+     #
      class Field
        ##
-       # @private Create an empty StandardSql::Field object.
-       def initialize
-         @gapi_json = nil
+       # Creates a new, immutable StandardSql::Field object.
+       #
+       # @overload initialize(name, type)
+       #   @param [String] name The name of the field. Optional. Can be absent for struct fields.
+       #   @param [StandardSql::DataType, String] type The type of the field. Optional. Absent if not explicitly
+       #     specified (e.g., `CREATE FUNCTION` statement can omit the return type; in this case the output parameter
+       #     does not have this "type" field).
+       #
+       def initialize **kwargs
+         # Convert client object kwargs to a gapi object
+         kwargs[:type] = DataType.gapi_from_string_or_data_type kwargs[:type] if kwargs[:type]
+         @gapi = Google::Apis::BigqueryV2::StandardSqlField.new(**kwargs)
        end
 
        ##
-       # The name of the field. (Can be absent for struct fields.)
+       # The name of the field. Optional. Can be absent for struct fields.
        #
        # @return [String, nil]
        #
        def name
-         return nil if @gapi_json[:name] == "".freeze
-
-         @gapi_json[:name]
+         return if @gapi.name == "".freeze
+         @gapi.name
        end
 
        ##
-       # The type of the field.
+       # The type of the field. Optional. Absent if not explicitly specified (e.g., `CREATE FUNCTION` statement can
+       # omit the return type; in this case the output parameter does not have this "type" field).
        #
-       # @return [DataType]
+       # @return [DataType, nil] The type of the field.
        #
        def type
-         DataType.from_gapi_json @gapi_json[:type]
+         DataType.from_gapi @gapi.type if @gapi.type
+       end
+
+       ##
+       # @private New Google::Apis::BigqueryV2::StandardSqlField object.
+       def to_gapi
+         @gapi
        end
 
        ##
-       # @private New StandardSql::Field from a JSON object.
-       def self.from_gapi_json gapi_json
+       # @private New StandardSql::Field from a Google::Apis::BigqueryV2::StandardSqlField object.
+       def self.from_gapi gapi
          new.tap do |f|
-           f.instance_variable_set :@gapi_json, gapi_json
+           f.instance_variable_set :@gapi, gapi
          end
        end
      end
 
      ##
-     # The type of a field or a column.
+     # The type of a variable, e.g., a function argument. See {Routine} and {Argument}.
+     #
+     # @example
+     #   require "google/cloud/bigquery"
+     #
+     #   bigquery = Google::Cloud::Bigquery.new
+     #   dataset = bigquery.dataset "my_dataset"
+     #   routine = dataset.create_routine "my_routine" do |r|
+     #     r.routine_type = "SCALAR_FUNCTION"
+     #     r.language = :SQL
+     #     r.body = "(SELECT SUM(IF(elem.name = \"foo\",elem.val,null)) FROM UNNEST(arr) AS elem)"
+     #     r.arguments = [
+     #       Google::Cloud::Bigquery::Argument.new(
+     #         name: "arr",
+     #         argument_kind: "FIXED_TYPE",
+     #         data_type: Google::Cloud::Bigquery::StandardSql::DataType.new(
+     #           type_kind: "ARRAY",
+     #           array_element_type: Google::Cloud::Bigquery::StandardSql::DataType.new(
+     #             type_kind: "STRUCT",
+     #             struct_type: Google::Cloud::Bigquery::StandardSql::StructType.new(
+     #               fields: [
+     #                 Google::Cloud::Bigquery::StandardSql::Field.new(
+     #                   name: "name",
+     #                   type: Google::Cloud::Bigquery::StandardSql::DataType.new(type_kind: "STRING")
+     #                 ),
+     #                 Google::Cloud::Bigquery::StandardSql::Field.new(
+     #                   name: "val",
+     #                   type: Google::Cloud::Bigquery::StandardSql::DataType.new(type_kind: "INT64")
+     #                 )
+     #               ]
+     #             )
+     #           )
+     #         )
+     #       )
+     #     ]
+     #   end
+     #
+     # @see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types Standard SQL Data Types
+     #
      class DataType
        ##
-       # @private Create an empty StandardSql::DataType object.
-       def initialize
-         @gapi_json = nil
+       # Creates a new, immutable StandardSql::DataType object.
+       #
+       # @overload initialize(type_kind, array_element_type, struct_type)
+       #   @param [String] type_kind The top level type of this field. Required. Can be [any standard SQL data
+       #     type](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types) (e.g., `INT64`, `DATE`,
+       #     `ARRAY`).
+       #   @param [DataType, String] array_element_type The type of the array's elements, if {#type_kind} is `ARRAY`.
+       #     See {#array?}. Optional.
+       #   @param [StructType] struct_type The fields of the struct, in order, if {#type_kind} is `STRUCT`. See
+       #     {#struct?}. Optional.
+       #
+       def initialize **kwargs
+         # Convert client object kwargs to a gapi object
+         if kwargs[:array_element_type]
+           kwargs[:array_element_type] = self.class.gapi_from_string_or_data_type kwargs[:array_element_type]
+         end
+         kwargs[:struct_type] = kwargs[:struct_type].to_gapi if kwargs[:struct_type]
+
+         @gapi = Google::Apis::BigqueryV2::StandardSqlDataType.new(**kwargs)
        end
 
        ##
-       # The top level type of this field.
-       #
-       # Can be any standard SQL data type (e.g., "INT64", "DATE", "ARRAY").
+       # The top level type of this field. Required. Can be any standard SQL data type (e.g., `INT64`, `DATE`,
+       # `ARRAY`).
        #
-       # @see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types
-       #   Standard SQL Data Types
+       # @see https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types Standard SQL Data Types
        #
-       # @return [String]
+       # @return [String] The upper case type.
        #
        def type_kind
-         @gapi_json[:typeKind]
+         @gapi.type_kind
        end
 
        ##
-       # The type of a fields when DataType is an Array. (See #array?)
+       # The type of the array's elements, if {#type_kind} is `ARRAY`. See {#array?}. Optional.
        #
        # @return [DataType, nil]
        #
        def array_element_type
-         return if @gapi_json[:arrayElementType].nil?
-
-         DataType.from_gapi_json @gapi_json[:arrayElementType]
+         return if @gapi.array_element_type.nil?
+         DataType.from_gapi @gapi.array_element_type
        end
 
        ##
-       # The fields of the struct. (See #struct?)
+       # The fields of the struct, in order, if {#type_kind} is `STRUCT`. See {#struct?}. Optional.
        #
        # @return [StructType, nil]
        #
        def struct_type
-         return if @gapi_json[:structType].nil?
-
-         StructType.from_gapi_json @gapi_json[:structType]
+         return if @gapi.struct_type.nil?
+         StructType.from_gapi @gapi.struct_type
        end
 
        ##
@@ -247,41 +384,108 @@ module Google
        end
 
        ##
-       # @private New StandardSql::DataType from a JSON object.
-       def self.from_gapi_json gapi_json
-         new.tap do |dt|
-           dt.instance_variable_set :@gapi_json, gapi_json
+       # @private New Google::Apis::BigqueryV2::StandardSqlDataType object.
+       def to_gapi
+         @gapi
+       end
+
+       ##
+       # @private New StandardSql::DataType from a Google::Apis::BigqueryV2::StandardSqlDataType object.
+       def self.from_gapi gapi
+         new.tap do |f|
+           f.instance_variable_set :@gapi, gapi
+         end
+       end
+
+       ##
+       # @private New Google::Apis::BigqueryV2::StandardSqlDataType from a String or StandardSql::DataType object.
+       def self.gapi_from_string_or_data_type data_type
+         return if data_type.nil?
+         if data_type.is_a? StandardSql::DataType
+           data_type.to_gapi
+         elsif data_type.is_a? Hash
+           data_type
+         elsif data_type.is_a?(String) || data_type.is_a?(Symbol)
+           Google::Apis::BigqueryV2::StandardSqlDataType.new type_kind: data_type.to_s.upcase
+         else
+           raise ArgumentError, "Unable to convert #{data_type} to Google::Apis::BigqueryV2::StandardSqlDataType"
         end
       end
     end
 
     ##
-    # The type of a `STRUCT` field or a column.
+    # The fields of a `STRUCT` type. See {DataType#struct_type}. See {Routine} and {Argument}.
+    #
+    # @example
+    #   require "google/cloud/bigquery"
+    #
+    #   bigquery = Google::Cloud::Bigquery.new
+    #   dataset = bigquery.dataset "my_dataset"
+    #   routine = dataset.create_routine "my_routine" do |r|
+    #     r.routine_type = "SCALAR_FUNCTION"
+    #     r.language = :SQL
+    #     r.body = "(SELECT SUM(IF(elem.name = \"foo\",elem.val,null)) FROM UNNEST(arr) AS elem)"
+    #     r.arguments = [
+    #       Google::Cloud::Bigquery::Argument.new(
+    #         name: "arr",
+    #         argument_kind: "FIXED_TYPE",
+    #         data_type: Google::Cloud::Bigquery::StandardSql::DataType.new(
+    #           type_kind: "ARRAY",
+    #           array_element_type: Google::Cloud::Bigquery::StandardSql::DataType.new(
+    #             type_kind: "STRUCT",
+    #             struct_type: Google::Cloud::Bigquery::StandardSql::StructType.new(
+    #               fields: [
+    #                 Google::Cloud::Bigquery::StandardSql::Field.new(
+    #                   name: "name",
+    #                   type: Google::Cloud::Bigquery::StandardSql::DataType.new(type_kind: "STRING")
+    #                 ),
+    #                 Google::Cloud::Bigquery::StandardSql::Field.new(
+    #                   name: "val",
+    #                   type: Google::Cloud::Bigquery::StandardSql::DataType.new(type_kind: "INT64")
+    #                 )
+    #               ]
+    #             )
+    #           )
+    #         )
+    #       )
+    #     ]
+    #   end
+    #
     class StructType
       ##
-      # @private Create an empty StandardSql::DataType object.
-      def initialize
-        @gapi_json = nil
+      # Creates a new, immutable StandardSql::StructType object.
+      #
+      # @overload initialize(fields)
+      #   @param [Array<Field>] fields The fields of the struct. Required.
+      #
+      def initialize **kwargs
+        # Convert each field client object to gapi object, if fields given (self.from_gapi does not pass kwargs)
+        kwargs[:fields] = kwargs[:fields]&.map(&:to_gapi) if kwargs[:fields]
+        @gapi = Google::Apis::BigqueryV2::StandardSqlStructType.new(**kwargs)
      end
 
      ##
-     # The top level type of this field.
+     # The fields of the struct.
      #
-     # Can be any standard SQL data type (e.g., "INT64", "DATE", "ARRAY").
-     #
-     # @return [Array<Field>]
+     # @return [Array<Field>] A frozen array of fields.
      #
      def fields
-       Array(@gapi_json[:fields]).map do |field_gapi_json|
-         Field.from_gapi_json field_gapi_json
-       end
+       Array(@gapi.fields).map do |field_gapi|
+         Field.from_gapi field_gapi
+       end.freeze
+     end
+
+     ##
+     # @private New Google::Apis::BigqueryV2::StandardSqlStructType object.
+     def to_gapi
+       @gapi
     end
 
     ##
-    # @private New StandardSql::StructType from a JSON object.
-    def self.from_gapi_json gapi_json
-      new.tap do |st|
-        st.instance_variable_set :@gapi_json, gapi_json
+    # @private New StandardSql::StructType from a Google::Apis::BigqueryV2::StandardSqlStructType object.
+    def self.from_gapi gapi
+      new.tap do |f|
+        f.instance_variable_set :@gapi, gapi
      end
    end
  end
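
With these changes, Field, DataType, and StructType are publicly constructible, and strings or symbols are upcased into a bare type_kind by gapi_from_string_or_data_type. A small sketch based on the constructors above:

require "google/cloud/bigquery"

int_type = Google::Cloud::Bigquery::StandardSql::DataType.new type_kind: "INT64"

# Strings/symbols are accepted where a DataType is expected and are upcased,
# per gapi_from_string_or_data_type above.
array_of_strings = Google::Cloud::Bigquery::StandardSql::DataType.new(
  type_kind: "ARRAY",
  array_element_type: "string"
)

struct = Google::Cloud::Bigquery::StandardSql::DataType.new(
  type_kind: "STRUCT",
  struct_type: Google::Cloud::Bigquery::StandardSql::StructType.new(
    fields: [
      Google::Cloud::Bigquery::StandardSql::Field.new(name: "id", type: int_type),
      Google::Cloud::Bigquery::StandardSql::Field.new(name: "tags", type: array_of_strings)
    ]
  )
)

puts struct.type_kind # "STRUCT"
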