gcloud 0.11.0 → 0.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +8 -8
- data/AUTHENTICATION.md +3 -3
- data/CHANGELOG.md +92 -0
- data/OVERVIEW.md +3 -3
- data/lib/gcloud.rb +75 -25
- data/lib/gcloud/backoff.rb +5 -1
- data/lib/gcloud/bigquery.rb +25 -43
- data/lib/gcloud/bigquery/copy_job.rb +13 -13
- data/lib/gcloud/bigquery/data.rb +20 -16
- data/lib/gcloud/bigquery/dataset.rb +202 -177
- data/lib/gcloud/bigquery/dataset/access.rb +118 -104
- data/lib/gcloud/bigquery/dataset/list.rb +14 -18
- data/lib/gcloud/bigquery/extract_job.rb +12 -12
- data/lib/gcloud/bigquery/insert_response.rb +12 -14
- data/lib/gcloud/bigquery/job.rb +45 -57
- data/lib/gcloud/bigquery/job/list.rb +18 -24
- data/lib/gcloud/bigquery/load_job.rb +35 -27
- data/lib/gcloud/bigquery/project.rb +53 -73
- data/lib/gcloud/bigquery/query_data.rb +28 -35
- data/lib/gcloud/bigquery/query_job.rb +18 -18
- data/lib/gcloud/bigquery/schema.rb +359 -0
- data/lib/gcloud/bigquery/service.rb +506 -0
- data/lib/gcloud/bigquery/table.rb +185 -266
- data/lib/gcloud/bigquery/table/list.rb +15 -19
- data/lib/gcloud/bigquery/view.rb +126 -81
- data/lib/gcloud/datastore.rb +39 -27
- data/lib/gcloud/datastore/commit.rb +2 -2
- data/lib/gcloud/datastore/dataset.rb +8 -19
- data/lib/gcloud/datastore/dataset/lookup_results.rb +2 -4
- data/lib/gcloud/datastore/dataset/query_results.rb +0 -2
- data/lib/gcloud/datastore/entity.rb +7 -1
- data/lib/gcloud/datastore/errors.rb +5 -27
- data/lib/gcloud/datastore/grpc_utils.rb +4 -3
- data/lib/gcloud/datastore/key.rb +6 -0
- data/lib/gcloud/datastore/service.rb +18 -12
- data/lib/gcloud/datastore/transaction.rb +0 -10
- data/lib/gcloud/dns.rb +29 -19
- data/lib/gcloud/dns/change.rb +10 -15
- data/lib/gcloud/dns/change/list.rb +4 -4
- data/lib/gcloud/dns/importer.rb +1 -1
- data/lib/gcloud/dns/project.rb +32 -49
- data/lib/gcloud/dns/record.rb +8 -2
- data/lib/gcloud/dns/record/list.rb +4 -4
- data/lib/gcloud/dns/service.rb +167 -0
- data/lib/gcloud/dns/zone.rb +33 -52
- data/lib/gcloud/dns/zone/list.rb +12 -16
- data/lib/gcloud/errors.rb +31 -19
- data/lib/gcloud/logging.rb +50 -39
- data/lib/gcloud/logging/entry.rb +197 -24
- data/lib/gcloud/logging/entry/list.rb +0 -2
- data/lib/gcloud/logging/logger.rb +1 -1
- data/lib/gcloud/logging/metric.rb +3 -9
- data/lib/gcloud/logging/metric/list.rb +0 -2
- data/lib/gcloud/logging/project.rb +58 -54
- data/lib/gcloud/logging/resource_descriptor.rb +2 -2
- data/lib/gcloud/logging/resource_descriptor/list.rb +0 -2
- data/lib/gcloud/logging/service.rb +32 -23
- data/lib/gcloud/logging/sink.rb +8 -14
- data/lib/gcloud/logging/sink/list.rb +0 -2
- data/lib/gcloud/pubsub.rb +21 -16
- data/lib/gcloud/pubsub/policy.rb +204 -0
- data/lib/gcloud/pubsub/project.rb +26 -38
- data/lib/gcloud/pubsub/service.rb +39 -31
- data/lib/gcloud/pubsub/subscription.rb +56 -59
- data/lib/gcloud/pubsub/subscription/list.rb +4 -4
- data/lib/gcloud/pubsub/topic.rb +69 -66
- data/lib/gcloud/pubsub/topic/list.rb +0 -2
- data/lib/gcloud/pubsub/topic/{batch.rb → publisher.rb} +15 -2
- data/lib/gcloud/resource_manager.rb +27 -26
- data/lib/gcloud/resource_manager/manager.rb +19 -39
- data/lib/gcloud/resource_manager/policy.rb +211 -0
- data/lib/gcloud/resource_manager/project.rb +97 -121
- data/lib/gcloud/resource_manager/project/list.rb +7 -7
- data/lib/gcloud/resource_manager/project/updater.rb +4 -9
- data/lib/gcloud/resource_manager/service.rb +127 -0
- data/lib/gcloud/storage.rb +24 -42
- data/lib/gcloud/storage/bucket.rb +104 -192
- data/lib/gcloud/storage/bucket/acl.rb +47 -143
- data/lib/gcloud/storage/bucket/cors.rb +55 -11
- data/lib/gcloud/storage/bucket/list.rb +14 -14
- data/lib/gcloud/storage/errors.rb +3 -43
- data/lib/gcloud/storage/file.rb +114 -111
- data/lib/gcloud/storage/file/acl.rb +27 -113
- data/lib/gcloud/storage/file/list.rb +21 -21
- data/lib/gcloud/storage/project.rb +49 -59
- data/lib/gcloud/storage/service.rb +347 -0
- data/lib/gcloud/translate.rb +24 -14
- data/lib/gcloud/translate/api.rb +12 -21
- data/lib/gcloud/translate/detection.rb +5 -5
- data/lib/gcloud/translate/language.rb +1 -1
- data/lib/gcloud/translate/service.rb +80 -0
- data/lib/gcloud/translate/translation.rb +6 -6
- data/lib/gcloud/version.rb +1 -1
- data/lib/gcloud/vision.rb +24 -15
- data/lib/gcloud/vision/annotate.rb +24 -21
- data/lib/gcloud/vision/annotation.rb +9 -9
- data/lib/gcloud/vision/annotation/entity.rb +11 -11
- data/lib/gcloud/vision/annotation/face.rb +25 -25
- data/lib/gcloud/vision/annotation/properties.rb +8 -8
- data/lib/gcloud/vision/annotation/safe_search.rb +4 -4
- data/lib/gcloud/vision/annotation/text.rb +7 -7
- data/lib/gcloud/vision/annotation/vertex.rb +1 -1
- data/lib/gcloud/vision/image.rb +11 -11
- data/lib/gcloud/vision/location.rb +5 -2
- data/lib/gcloud/vision/project.rb +14 -16
- data/lib/gcloud/vision/service.rb +66 -0
- data/lib/google/api_client.rb +0 -0
- metadata +27 -24
- data/lib/gcloud/bigquery/connection.rb +0 -624
- data/lib/gcloud/bigquery/errors.rb +0 -68
- data/lib/gcloud/bigquery/table/schema.rb +0 -234
- data/lib/gcloud/dns/connection.rb +0 -173
- data/lib/gcloud/dns/errors.rb +0 -68
- data/lib/gcloud/resource_manager/connection.rb +0 -134
- data/lib/gcloud/resource_manager/errors.rb +0 -68
- data/lib/gcloud/storage/connection.rb +0 -444
- data/lib/gcloud/translate/connection.rb +0 -85
- data/lib/gcloud/translate/errors.rb +0 -68
- data/lib/gcloud/upload.rb +0 -95
- data/lib/gcloud/vision/connection.rb +0 -63
- data/lib/gcloud/vision/errors.rb +0 -69
|
@@ -0,0 +1,359 @@
|
|
|
1
|
+
# Copyright 2015 Google Inc. All rights reserved.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
module Gcloud
  module Bigquery
    ##
    # # Table Schema
    #
    # A builder for BigQuery table schemas, passed to block arguments to
    # {Dataset#create_table} and {Table#schema}. Supports nested and
    # repeated fields via a nested block.
    #
    # @see https://cloud.google.com/bigquery/preparing-data-for-bigquery
    #   Preparing Data for BigQuery
    #
    # @example
    #   require "gcloud"
    #
    #   gcloud = Gcloud.new
    #   bigquery = gcloud.bigquery
    #   dataset = bigquery.dataset "my_dataset"
    #   table = dataset.create_table "my_table"
    #
    #   table.schema do |schema|
    #     schema.string "first_name", mode: :required
    #     schema.record "cities_lived", mode: :repeated do |cities_lived|
    #       cities_lived.string "place", mode: :required
    #       cities_lived.integer "number_of_years", mode: :required
    #     end
    #   end
    #
    class Schema
      def initialize
        # @nested is set to true (via instance_variable_set in #record) only
        # on the temporary builder yielded for a RECORD field, so that RECORD
        # fields cannot be nested more than one level deep.
        @nested = nil
      end

      ##
      # The fields of the schema, as {Schema::Field} objects. Memoized;
      # invalidated/rebuilt by #fields=.
      #
      # NOTE(review): assumes @gapi has been set via .from_gapi — a bare
      # Schema.new has no @gapi and this would raise; verify construction
      # paths.
      def fields
        @fields ||= @gapi.fields.map { |f| Field.from_gapi f }
      end

      ##
      # Replaces all fields of the schema.
      #
      # @param [Array<Field>] new_fields the new schema fields.
      def fields= new_fields
        @gapi.fields = Array(new_fields).map(&:to_gapi)
        @fields = @gapi.fields.map { |f| Field.from_gapi f }
      end

      ##
      # Whether the schema has no fields.
      def empty?
        fields.empty?
      end

      # @private Whether the schema has been modified since it was loaded
      # from the API. Flushes any field mutations into @gapi first so the
      # JSON comparison sees them.
      def changed?
        return false if frozen?
        check_for_mutated_schema!
        @original_json != @gapi.to_json
      end

      # @private Deep-freezes the underlying API object and the memoized
      # Field wrappers before freezing self.
      def freeze
        @gapi = @gapi.dup.freeze
        @gapi.fields.freeze
        @fields = @gapi.fields.map { |f| Field.from_gapi(f).freeze }
        @fields.freeze
        super
      end

      ##
      # @private Make sure any changes are saved.
      def check_for_mutated_schema!
        return if frozen?
        return if @gapi.frozen?
        return if @fields.nil?
        gapi_fields = Array(@fields).map(&:to_gapi)
        @gapi.update! fields: gapi_fields
      end

      # @private New Schema wrapping a
      # Google::Apis::BigqueryV2::TableSchema object. Tolerates nil input
      # and nil fields by substituting empty values.
      def self.from_gapi gapi
        gapi ||= Google::Apis::BigqueryV2::TableSchema.new fields: []
        gapi.fields ||= []
        new.tap do |s|
          s.instance_variable_set :@gapi, gapi
          s.instance_variable_set :@original_json, gapi.to_json
        end
      end

      # @private Returns the underlying API object, flushing any pending
      # field mutations first.
      def to_gapi
        check_for_mutated_schema!
        @gapi
      end

      # @private Value equality on the underlying API representation.
      def == other
        return false unless other.is_a? Schema
        to_gapi.to_h == other.to_gapi.to_h
      end

      ##
      # Adds a string field to the schema.
      #
      # @param [String] name The field name. The name must contain only
      #   letters (a-z, A-Z), numbers (0-9), or underscores (_), and must
      #   start with a letter or underscore. The maximum length is 128
      #   characters.
      # @param [String] description A description of the field.
      # @param [Symbol] mode The field's mode. The possible values are
      #   `:nullable`, `:required`, and `:repeated`. The default value is
      #   `:nullable`.
      def string name, description: nil, mode: :nullable
        add_field name, :string, nil, description: description, mode: mode
      end

      ##
      # Adds an integer field to the schema.
      #
      # @param [String] name The field name (letters, numbers, underscores;
      #   must start with a letter or underscore; max 128 characters).
      # @param [String] description A description of the field.
      # @param [Symbol] mode The field's mode: `:nullable` (default),
      #   `:required`, or `:repeated`.
      def integer name, description: nil, mode: :nullable
        add_field name, :integer, nil, description: description, mode: mode
      end

      ##
      # Adds a floating-point number field to the schema.
      #
      # @param [String] name The field name (letters, numbers, underscores;
      #   must start with a letter or underscore; max 128 characters).
      # @param [String] description A description of the field.
      # @param [Symbol] mode The field's mode: `:nullable` (default),
      #   `:required`, or `:repeated`.
      def float name, description: nil, mode: :nullable
        add_field name, :float, nil, description: description, mode: mode
      end

      ##
      # Adds a boolean field to the schema.
      #
      # @param [String] name The field name (letters, numbers, underscores;
      #   must start with a letter or underscore; max 128 characters).
      # @param [String] description A description of the field.
      # @param [Symbol] mode The field's mode: `:nullable` (default),
      #   `:required`, or `:repeated`.
      def boolean name, description: nil, mode: :nullable
        add_field name, :boolean, nil, description: description, mode: mode
      end

      ##
      # Adds a timestamp field to the schema.
      #
      # @param [String] name The field name (letters, numbers, underscores;
      #   must start with a letter or underscore; max 128 characters).
      # @param [String] description A description of the field.
      # @param [Symbol] mode The field's mode: `:nullable` (default),
      #   `:required`, or `:repeated`.
      def timestamp name, description: nil, mode: :nullable
        add_field name, :timestamp, nil, description: description, mode: mode
      end

      ##
      # Adds a record field to the schema. A block must be passed describing
      # the nested fields of the record. For more information about nested
      # and repeated records, see [Preparing Data for BigQuery
      # ](https://cloud.google.com/bigquery/preparing-data-for-bigquery).
      #
      # @param [String] name The field name (letters, numbers, underscores;
      #   must start with a letter or underscore; max 128 characters).
      # @param [String] description A description of the field.
      # @param [Symbol] mode The field's mode: `:nullable`, `:required`, or
      #   `:repeated`.
      # @yield [nested_schema] a block for setting the nested schema
      # @yieldparam [Schema] nested_schema the object accepting the
      #   nested schema
      # @raise [ArgumentError] if called on an already-nested schema, or if
      #   no block is given.
      #
      # @example
      #   table.schema do |schema|
      #     schema.string "first_name", mode: :required
      #     schema.record "cities_lived", mode: :repeated do |cities_lived|
      #       cities_lived.string "place", mode: :required
      #       cities_lived.integer "number_of_years", mode: :required
      #     end
      #   end
      #
      def record name, description: nil, mode: nil
        # BigQuery allows nested RECORDs, but this builder deliberately
        # limits nesting to one level.
        fail ArgumentError, "nested RECORD type is not permitted" if @nested
        fail ArgumentError, "a block is required" unless block_given?
        empty_schema = Google::Apis::BigqueryV2::TableSchema.new fields: []
        nested_schema = self.class.from_gapi(empty_schema).tap do |s|
          s.instance_variable_set :@nested, true
        end
        yield nested_schema
        add_field name, :record, nested_schema.fields,
                  description: description, mode: mode
      end

      protected

      # Adds a field, replacing any existing field with the same name.
      def add_field name, type, nested_fields, description: nil,
                    mode: :nullable
        # Remove any existing field of this name
        fields.reject! { |f| f.name == name }
        fields << Field.new(name, type, description: description,
                                        mode: mode, fields: nested_fields)
      end

      ##
      # A single field in a table schema, wrapping a
      # Google::Apis::BigqueryV2::TableFieldSchema object.
      class Field
        # @private Valid field modes (uppercase, as the API expects).
        MODES = %w( NULLABLE REQUIRED REPEATED )

        # @private Valid field types (uppercase, as the API expects).
        TYPES = %w( STRING INTEGER FLOAT BOOLEAN TIMESTAMP RECORD )

        # @param [String] name the field name.
        # @param [Symbol, String] type the field type; validated against
        #   TYPES (case-insensitive).
        # @param [String, nil] description optional field description.
        # @param [Symbol, String, nil] mode the field mode; validated
        #   against MODES (case-insensitive). Skipped when nil.
        # @param [Array<Field>, nil] fields nested fields for RECORD types.
        # @raise [ArgumentError] on an unknown type or mode.
        def initialize name, type, description: nil,
                       mode: :nullable, fields: nil
          @gapi = Google::Apis::BigqueryV2::TableFieldSchema.new
          @gapi.update! name: name
          @gapi.update! type: verify_type(type)
          @gapi.update! description: description if description
          @gapi.update! mode: verify_mode(mode) if mode
          if fields
            @fields = fields
            check_for_changed_fields!
          end
          # Snapshot taken after all updates so #changed? starts false.
          @original_json = @gapi.to_json
        end

        def name
          @gapi.name
        end

        def name= new_name
          @gapi.update! name: new_name
        end

        def type
          @gapi.type
        end

        # @raise [ArgumentError] on an unknown type.
        def type= new_type
          @gapi.update! type: verify_type(new_type)
        end

        def description
          @gapi.description
        end

        def description= new_description
          @gapi.update! description: new_description
        end

        def mode
          @gapi.mode
        end

        # @raise [ArgumentError] on an unknown mode.
        def mode= new_mode
          @gapi.update! mode: verify_mode(new_mode)
        end

        # Nested fields (for RECORD types), as Field objects. Memoized.
        def fields
          @fields ||= Array(@gapi.fields).map { |f| Field.from_gapi f }
        end

        def fields= new_fields
          @fields = new_fields
        end

        ##
        # @private Make sure any fields are saved.
        def check_for_changed_fields!
          return if frozen?
          fields.each(&:check_for_changed_fields!)
          gapi_fields = Array(fields).map(&:to_gapi)
          # The API object represents "no nested fields" as nil, not [].
          gapi_fields = nil if gapi_fields.empty?
          @gapi.update! fields: gapi_fields
        end

        # @private Whether the field has been modified since it was loaded.
        # BUGFIX: was `==`, which reported true only when UNchanged —
        # inverted relative to Schema#changed?.
        def changed?
          @original_json != to_gapi.to_json
        end

        # @private New Field wrapping a TableFieldSchema object. The
        # placeholder name/type are immediately overwritten by the
        # injected @gapi.
        def self.from_gapi gapi
          new("to-be-replaced", "STRING").tap do |f|
            f.instance_variable_set :@gapi, gapi
            f.instance_variable_set :@original_json, gapi.to_json
          end
        end

        # @private Returns the underlying API object with pending nested
        # field changes flushed.
        def to_gapi
          # make sure any changes are saved.
          check_for_changed_fields!
          @gapi
        end

        # @private Value equality on the underlying API representation.
        def == other
          return false unless other.is_a? Field
          to_gapi.to_h == other.to_gapi.to_h
        end

        protected

        # Normalizes a type to the API's uppercase string form.
        # @raise [ArgumentError] when the type is not in TYPES.
        def verify_type type
          upcase_type = type.to_s.upcase
          unless TYPES.include? upcase_type
            fail ArgumentError,
                 "Type '#{upcase_type}' not found in #{TYPES.inspect}"
          end
          upcase_type
        end

        # Normalizes a mode to the API's uppercase string form.
        # BUGFIX: the original was missing the comma after ArgumentError
        # (`fail ArgumentError "..."`), which raised NoMethodError instead
        # of the intended ArgumentError on invalid modes.
        # @raise [ArgumentError] when the mode is not in MODES.
        def verify_mode mode
          upcase_mode = mode.to_s.upcase
          unless MODES.include? upcase_mode
            fail ArgumentError, "Unable to determine mode for '#{mode}'"
          end
          upcase_mode
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,506 @@
|
|
|
1
|
+
# Copyright 2015 Google Inc. All rights reserved.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
require "gcloud/version"
|
|
17
|
+
require "gcloud/errors"
|
|
18
|
+
require "google/apis/bigquery_v2"
|
|
19
|
+
require "pathname"
|
|
20
|
+
require "digest/md5"
|
|
21
|
+
require "mime/types"
|
|
22
|
+
|
|
23
|
+
module Gcloud
|
|
24
|
+
module Bigquery
|
|
25
|
+
##
|
|
26
|
+
# @private Represents the Bigquery service and API calls.
|
|
27
|
+
class Service
|
|
28
|
+
##
|
|
29
|
+
# Alias to the Google Client API module
|
|
30
|
+
API = Google::Apis::BigqueryV2
|
|
31
|
+
|
|
32
|
+
# @private The Google Cloud project ID requests are issued against.
attr_accessor :project

# @private The credentials object; must respond to #client, which is
# installed as the Google API client's authorization.
attr_accessor :credentials

##
# Creates a new Service instance.
#
# @param [String] project the Google Cloud project ID.
# @param [Object] credentials credentials exposing #client, used to
#   authorize API requests.
# @param [Integer, nil] retries number of retries for failed requests;
#   defaults to 3 when nil.
# @param [Integer, nil] timeout request timeout in seconds; the client
#   default is used when nil.
def initialize project, credentials, retries: nil, timeout: nil
  @project = project
  # BUGFIX: the original assigned @credentials twice in a row; the
  # duplicate statement has been removed.
  @credentials = credentials
  @service = API::BigqueryService.new
  @service.client_options.application_name = "gcloud-ruby"
  @service.client_options.application_version = Gcloud::VERSION
  @service.request_options.retries = retries || 3
  @service.request_options.timeout_sec = timeout if timeout
  @service.authorization = @credentials.client
end
|
|
51
|
+
|
|
52
|
+
def service
|
|
53
|
+
return mocked_service if mocked_service
|
|
54
|
+
@service
|
|
55
|
+
end
|
|
56
|
+
attr_accessor :mocked_service
|
|
57
|
+
|
|
58
|
+
##
# Lists all datasets in the specified project to which you have
# been granted the READER dataset role.
#
# Recognized options: :all (include hidden datasets), :max (page
# size), :token (page token from a previous call).
# Google API errors are converted to Gcloud errors, here and in every
# wrapper below.
def list_datasets options = {}
  service.list_datasets \
    @project, all: options[:all], max_results: options[:max],
              page_token: options[:token]
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end

##
# Returns the dataset specified by datasetID.
def get_dataset dataset_id
  service.get_dataset @project, dataset_id
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end

##
# Creates a new empty dataset.
#
# new_dataset_gapi is a Google::Apis::BigqueryV2::Dataset resource.
def insert_dataset new_dataset_gapi
  service.insert_dataset @project, new_dataset_gapi
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end

##
# Updates information in an existing dataset, only replacing
# fields that are provided in the submitted dataset resource.
def patch_dataset dataset_id, patched_dataset_gapi
  service.patch_dataset @project, dataset_id, patched_dataset_gapi
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end

##
# Deletes the dataset specified by the datasetId value.
# Before you can delete a dataset, you must delete all its tables,
# either manually or by specifying force: true in options.
# Immediately after deletion, you can create another dataset with
# the same name.
def delete_dataset dataset_id, force = nil
  # force maps to the API's delete_contents flag.
  service.delete_dataset @project, dataset_id, delete_contents: force
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end
|
|
105
|
+
|
|
106
|
+
##
# Lists all tables in the specified dataset.
# Requires the READER dataset role.
#
# Recognized options: :max (page size), :token (page token).
def list_tables dataset_id, options = {}
  service.list_tables @project, dataset_id, max_results: options[:max],
                                            page_token: options[:token]
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end

# Fetches a table in an arbitrary project (not just the service's
# default @project).
def get_project_table project_id, dataset_id, table_id
  service.get_table project_id, dataset_id, table_id
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end

##
# Gets the specified table resource by table ID.
# This method does not return the data in the table,
# it only returns the table resource,
# which describes the structure of this table.
def get_table dataset_id, table_id
  get_project_table @project, dataset_id, table_id
rescue Google::Apis::Error => e
  # NOTE(review): get_project_table already converts API errors, so this
  # rescue appears redundant; retained as-is.
  raise Gcloud::Error.from_error(e)
end

##
# Creates a new, empty table in the dataset.
def insert_table dataset_id, new_table_gapi
  service.insert_table @project, dataset_id, new_table_gapi
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end

##
# Updates information in an existing table, replacing fields that
# are provided in the submitted table resource.
def patch_table dataset_id, table_id, patched_table_gapi
  service.patch_table @project, dataset_id, table_id, patched_table_gapi
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end

##
# Deletes the table specified by tableId from the dataset.
# If the table contains data, all the data will be deleted.
def delete_table dataset_id, table_id
  service.delete_table @project, dataset_id, table_id
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end
|
|
158
|
+
|
|
159
|
+
##
# Retrieves data from the table.
#
# Recognized options (removed from the hash as they are read):
# :max (page size), :token (page token), :start (zero-based row
# offset).
def list_tabledata dataset_id, table_id, options = {}
  service.list_table_data @project, dataset_id, table_id,
                          max_results: options.delete(:max),
                          page_token: options.delete(:token),
                          start_index: options.delete(:start)
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end

# Streams rows into the table via the insertAll API.
#
# Each row gets a deterministic insert_id (MD5 of its #inspect output)
# so that retried requests are deduplicated server-side.
# Recognized options: :ignore_unknown, :skip_invalid.
def insert_tabledata dataset_id, table_id, rows, options = {}
  insert_rows = Array(rows).map do |row|
    Google::Apis::BigqueryV2::InsertAllTableDataRequest::Row.new(
      insert_id: Digest::MD5.base64digest(row.inspect),
      # Hash[row.map{|(k,v)| [k.to_s,v]}] for Hash<String,Object>
      json: row
    )
  end
  insert_req = Google::Apis::BigqueryV2::InsertAllTableDataRequest.new(
    rows: insert_rows,
    ignore_unknown_values: options[:ignore_unknown],
    skip_invalid_rows: options[:skip_invalid]
  )

  service.insert_all_table_data @project, dataset_id, table_id, insert_req
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end
|
|
188
|
+
|
|
189
|
+
##
# Lists all jobs in the specified project to which you have
# been granted the READER job role.
#
# Recognized options: :all (all users' jobs), :max, :token,
# :filter (job state filter).
def list_jobs options = {}
  service.list_jobs \
    @project, all_users: options[:all], max_results: options[:max],
              page_token: options[:token], projection: "full",
              state_filter: options[:filter]
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end

##
# Returns the job specified by jobID.
def get_job job_id
  service.get_job @project, job_id
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end

# Inserts a job with the given JobConfiguration object.
def insert_job config
  job_object = API::Job.new(
    configuration: config
  )
  service.insert_job @project, job_object
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end

# Starts an asynchronous query job.
# (query_table_config is defined elsewhere in this file, outside the
# visible region.)
def query_job query, options = {}
  config = query_table_config(query, options)
  service.insert_job @project, config
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end

# Runs a synchronous query.
# (query_config is defined elsewhere in this file, outside the visible
# region.)
def query query, options = {}
  service.query_job @project, query_config(query, options)
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end

##
# Returns the query data for the job
#
# Recognized options (removed from the hash as they are read):
# :max, :token, :start, :timeout (milliseconds).
def job_query_results job_id, options = {}
  service.get_job_query_results @project,
                                job_id,
                                max_results: options.delete(:max),
                                page_token: options.delete(:token),
                                start_index: options.delete(:start),
                                timeout_ms: options.delete(:timeout)
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end
|
|
243
|
+
|
|
244
|
+
# Starts a table copy job. The *_config helpers referenced below are
# defined elsewhere in this file, outside the visible region.
def copy_table source, target, options = {}
  service.insert_job @project, copy_table_config(source, target, options)
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end

# Starts a job extracting a table to Google Cloud Storage files.
def extract_table table, storage_files, options = {}
  service.insert_job \
    @project, extract_table_config(table, storage_files, options)
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end

# Starts a load job from a gs:// URL.
def load_table_gs_url dataset_id, table_id, url, options = {}
  service.insert_job \
    @project, load_table_url_config(dataset_id, table_id, url, options)
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end

# Starts a load job uploading a local file.
# (mime_type_for is defined elsewhere in this file, outside the visible
# region.)
def load_table_file dataset_id, table_id, file, options = {}
  service.insert_job \
    @project, load_table_file_config(dataset_id, table_id, file, options),
    upload_source: file, content_type: mime_type_for(file)
rescue Google::Apis::Error => e
  raise Gcloud::Error.from_error(e)
end
|
|
271
|
+
|
|
272
|
+
##
# Extracts at least `tbl` group, and possibly `dts` and `prj` groups,
# from strings in the formats: "my_table", "my_dataset.my_table", or
# "my-project:my_dataset.my_table". Then merges project_id and
# dataset_id from the default table if they are missing.
#
# @raise [ArgumentError] when the string cannot be parsed.
def self.table_ref_from_s str, default_table_ref
  str = str.to_s
  # Named captures: prj (before ":"), dts (before "."), tbl (rest).
  m = /\A(((?<prj>\S*):)?(?<dts>\S*)\.)?(?<tbl>\S*)\z/.match str
  unless m
    fail ArgumentError, "unable to identify table from #{str.inspect}"
  end
  # nil captures are dropped so the merge below does not overwrite
  # defaults with nil.
  str_table_ref_hash = {
    project_id: m["prj"],
    dataset_id: m["dts"],
    table_id: m["tbl"]
  }.delete_if { |_, v| v.nil? }
  new_table_ref_hash = default_table_ref.to_h.merge str_table_ref_hash
  Google::Apis::BigqueryV2::TableReference.new new_table_ref_hash
end
|
|
291
|
+
|
|
292
|
+
def inspect
|
|
293
|
+
"#{self.class}(#{@project})"
|
|
294
|
+
end
|
|
295
|
+
|
|
296
|
+
protected
|
|
297
|
+
|
|
298
|
+
def table_ref_from tbl
|
|
299
|
+
return nil if tbl.nil?
|
|
300
|
+
API::TableReference.new(
|
|
301
|
+
project_id: tbl.project_id,
|
|
302
|
+
dataset_id: tbl.dataset_id,
|
|
303
|
+
table_id: tbl.table_id
|
|
304
|
+
)
|
|
305
|
+
end
|
|
306
|
+
|
|
307
|
+
##
# Builds an API::DatasetReference from either a dataset-like object
# (anything responding to #dataset_id) or a plain dataset ID value.
# The project falls back to `pjt`, then the object's project_id (when
# available), then the service's @project.
def dataset_ref_from dts, pjt = nil
  return nil if dts.nil?
  if dts.respond_to? :dataset_id
    API::DatasetReference.new \
      project_id: (pjt || dts.project_id || @project),
      dataset_id: dts.dataset_id
  else
    API::DatasetReference.new \
      project_id: (pjt || @project),
      dataset_id: dts
  end
end
|
|
321
|
+
|
|
322
|
+
##
# Builds the load-job options hash for uploading a local file.
# Nil-valued entries are stripped so they are omitted from the request.
def load_table_file_opts dataset_id, table_id, file, options = {}
  path = Pathname(file).to_path
  dest_table = Google::Apis::BigqueryV2::TableReference.new(
    project_id: @project, dataset_id: dataset_id, table_id: table_id)
  opts = {
    destination_table: dest_table,
    create_disposition: create_disposition(options[:create]),
    write_disposition: write_disposition(options[:write]),
    source_format: source_format(path, options[:format]),
    projection_fields: projection_fields(options[:projection_fields]),
    allow_jagged_rows: options[:jagged_rows],
    allow_quoted_newlines: options[:quoted_newlines],
    encoding: options[:encoding],
    field_delimiter: options[:delimiter],
    ignore_unknown_values: options[:ignore_unknown],
    max_bad_records: options[:max_bad_records],
    quote: options[:quote],
    schema: options[:schema],
    skip_leading_rows: options[:skip_leading]
  }
  opts.reject { |_, v| v.nil? }
end
|
|
339
|
+
|
|
340
|
+
##
# Wraps the file-load options in a full API::Job description.
def load_table_file_config dataset_id, table_id, file, options = {}
  load_opts = load_table_file_opts dataset_id, table_id, file, options
  load_config = API::JobConfigurationLoad.new load_opts
  API::Job.new configuration: API::JobConfiguration.new(
    load: load_config,
    dry_run: options[:dryrun])
end
|
|
349
|
+
|
|
350
|
+
##
# Builds the load-job options hash for loading from one or more
# Cloud Storage URLs. Nil-valued entries are stripped so they are
# omitted from the request.
def load_table_url_opts dataset_id, table_id, url, options = {}
  dest_table = Google::Apis::BigqueryV2::TableReference.new(
    project_id: @project, dataset_id: dataset_id, table_id: table_id)
  opts = {
    destination_table: dest_table,
    source_uris: Array(url),
    create_disposition: create_disposition(options[:create]),
    write_disposition: write_disposition(options[:write]),
    source_format: source_format(url, options[:format]),
    projection_fields: projection_fields(options[:projection_fields]),
    allow_jagged_rows: options[:jagged_rows],
    allow_quoted_newlines: options[:quoted_newlines],
    encoding: options[:encoding],
    field_delimiter: options[:delimiter],
    ignore_unknown_values: options[:ignore_unknown],
    max_bad_records: options[:max_bad_records],
    quote: options[:quote],
    schema: options[:schema],
    skip_leading_rows: options[:skip_leading]
  }
  opts.reject { |_, v| v.nil? }
end
|
|
367
|
+
|
|
368
|
+
##
# Wraps the URL-load options in a full API::Job description.
def load_table_url_config dataset_id, table_id, url, options = {}
  load_opts = load_table_url_opts dataset_id, table_id, url, options
  load_config = API::JobConfigurationLoad.new load_opts
  API::Job.new configuration: API::JobConfiguration.new(
    load: load_config,
    dry_run: options[:dryrun])
end
|
|
377
|
+
|
|
378
|
+
##
# Job description for query job
def query_table_config query, options
  dest_table = table_ref_from options[:table]
  default_dataset = dataset_ref_from options[:dataset]
  query_config = API::JobConfigurationQuery.new(
    query: query,
    # tableDefinitions: { ... },
    priority: priority_value(options[:priority]),
    use_query_cache: options[:cache],
    destination_table: dest_table,
    create_disposition: create_disposition(options[:create]),
    write_disposition: write_disposition(options[:write]),
    allow_large_results: options[:large_results],
    flatten_results: options[:flatten],
    default_dataset: default_dataset
  )
  API::Job.new configuration: API::JobConfiguration.new(query: query_config)
end
|
|
400
|
+
|
|
401
|
+
##
# Request description for a synchronous query.
def query_config query, options = {}
  API::QueryRequest.new(
    query: query,
    max_results: options[:max],
    default_dataset: dataset_ref_from(options[:dataset], options[:project]),
    timeout_ms: options[:timeout],
    dry_run: options[:dryrun],
    use_query_cache: options[:cache]
  )
end
|
|
413
|
+
|
|
414
|
+
##
# Job description for copy job
def copy_table_config source, target, options = {}
  copy_config = API::JobConfigurationTableCopy.new(
    source_table: source,
    destination_table: target,
    create_disposition: create_disposition(options[:create]),
    write_disposition: write_disposition(options[:write])
  )
  API::Job.new configuration: API::JobConfiguration.new(
    copy: copy_config,
    dry_run: options[:dryrun])
end
|
|
429
|
+
|
|
430
|
+
##
# Job description for an extract (export) job. Storage file objects
# responding to #to_gs_url are converted to their gs:// URL strings.
def extract_table_config table, storage_files, options = {}
  storage_urls = Array(storage_files).map do |file|
    file.respond_to?(:to_gs_url) ? file.to_gs_url : file
  end
  dest_format = source_format storage_urls.first, options[:format]
  extract_config = API::JobConfigurationExtract.new(
    destination_uris: Array(storage_urls),
    source_table: table,
    destination_format: dest_format,
    compression: options[:compression],
    field_delimiter: options[:delimiter],
    print_header: options[:header]
  )
  API::Job.new configuration: API::JobConfiguration.new(
    extract: extract_config,
    dry_run: options[:dryrun])
end
|
|
449
|
+
|
|
450
|
+
##
# Normalizes a user-supplied create disposition (string or symbol,
# any case) to the API constant. Returns nil for unrecognized values.
def create_disposition str
  case str.to_s.downcase
  when "create_if_needed", "createifneeded", "if_needed", "needed"
    "CREATE_IF_NEEDED"
  when "create_never", "createnever", "never"
    "CREATE_NEVER"
  end
end
|
|
459
|
+
|
|
460
|
+
##
# Normalizes a user-supplied write disposition (string or symbol,
# any case) to the API constant. Returns nil for unrecognized values.
def write_disposition str
  case str.to_s.downcase
  when "write_truncate", "writetruncate", "truncate"
    "WRITE_TRUNCATE"
  when "write_append", "writeappend", "append"
    "WRITE_APPEND"
  when "write_empty", "writeempty", "empty"
    "WRITE_EMPTY"
  end
end
|
|
471
|
+
|
|
472
|
+
##
# Normalizes a user-supplied query priority (string or symbol, any
# case) to the API constant. Returns nil for unrecognized values.
def priority_value str
  case str.to_s.downcase
  when "batch" then "BATCH"
  when "interactive" then "INTERACTIVE"
  end
end
|
|
476
|
+
|
|
477
|
+
##
# Maps a user-supplied format name, or failing that a file path/URL
# extension, to the BigQuery sourceFormat constant.
#
# path   - a single path/URL String, or an Array of them (only the
#          first entry's extension is inspected). Callers such as
#          load_table_url_opts pass the raw `url` argument, which may
#          be an Array (it is wrapped with Array() for source_uris),
#          so an Array must not raise here.
# format - explicit format name (string or symbol, any case); wins
#          over extension sniffing when recognized.
#
# Returns nil when the format cannot be determined.
def source_format path, format
  val = { "csv" => "CSV",
          "json" => "NEWLINE_DELIMITED_JSON",
          "newline_delimited_json" => "NEWLINE_DELIMITED_JSON",
          "avro" => "AVRO",
          "datastore" => "DATASTORE_BACKUP",
          "datastore_backup" => "DATASTORE_BACKUP" }[format.to_s.downcase]
  return val unless val.nil?
  # Fix: previously an Array path raised NoMethodError on #end_with?;
  # fall back to the first URL's extension instead.
  path = path.first if path.is_a? Array
  return nil if path.nil?
  return "CSV" if path.end_with? ".csv"
  return "NEWLINE_DELIMITED_JSON" if path.end_with? ".json"
  return "AVRO" if path.end_with? ".avro"
  return "DATASTORE_BACKUP" if path.end_with? ".backup_info"
  nil
end
|
|
492
|
+
|
|
493
|
+
##
# Coerces a single field name or list of names to an Array, passing
# nil through untouched.
def projection_fields array_or_str
  return nil if array_or_str.nil?
  Array(array_or_str)
end
|
|
496
|
+
|
|
497
|
+
##
# Best-effort MIME type lookup for a local file path. Returns nil
# when the type is unknown or when lookup fails for any reason
# (the bare rescue is deliberate: detection is advisory only).
def mime_type_for file
  detected = MIME::Types.of(Pathname(file).to_path).first.to_s
  detected.empty? ? nil : detected
rescue
  nil
end
|
|
504
|
+
end
|
|
505
|
+
end
|
|
506
|
+
end
|