kura 0.1.5 → 0.2.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 8c8c4c2a36a6496e732ea16eb4f9ce00b0c684f5
-  data.tar.gz: 89cd59cba88c72171737ecddd586cec674c2f8e9
+  metadata.gz: 7e158696726d53d4f57682c7074342b353ec867d
+  data.tar.gz: 1a52d2b10903e57608ce0dbfdc0ae25c2a00f33a
 SHA512:
-  metadata.gz: 7859fa4470015fbad77dc101066044e3cf0a08feb324c0f0f41d5efa9bd71d96db4ecdcf2d6d8028ee558c90e97be3827020256ade9b07de55318f4ad7c1dd2b
-  data.tar.gz: 962c374c8502f1a5bc1bce15b9d158083b18093a7b328f2a56980aa13520485ca4c538282f38f3bde048c778aecca23bfc8879eedeb3f8aa30f4f9411128132d
+  metadata.gz: c4785e4b3bf7e51d0dac054eb8b88cf79155c56bfc1cf285a41f7c9edf5f438e9f3559a3a7329c2fe774a746fb42fe4caee7b9d6ba66c4295ff902eeca6b2027
+  data.tar.gz: 9f4ab700c948f13dda3e11fcdd0ac1dda6494ef78e284f59f0566d7a996b54d6976cfa686150cb6e97204e7094e13d0fda7b96c61c5313a2bb6d034fb16a1457
data/kura.gemspec CHANGED
@@ -22,7 +22,7 @@ Gem::Specification.new do |spec|
 
   spec.required_ruby_version = '>= 2.1'
 
-  spec.add_runtime_dependency "google-api-client", "~> 0.8.5"
+  spec.add_runtime_dependency "google-api-client", "~> 0.9.pre3"
 
   spec.add_development_dependency "bundler", "~> 1.10"
   spec.add_development_dependency "rake", "~> 10.0"
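
The runtime dependency jumps from the google-api-client 0.8 series to the rewritten 0.9 prerelease line, which drops the discovery-based Google::APIClient in favor of generated service classes; every change in client.rb below follows from that switch. A minimal Gemfile sketch for consumers (the source URL is the conventional default, not part of this diff):

    # Gemfile
    source "https://rubygems.org"

    gem "kura", "0.2.0"  # resolves google-api-client to the declared ~> 0.9.pre3 line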
data/lib/kura/client.rb CHANGED
@@ -1,11 +1,12 @@
 # coding: utf-8
 
-require "google/api_client"
+require "google/apis/bigquery_v2"
+require "googleauth"
 require "kura/version"
 
 module Kura
   class Client
-    def initialize(default_project_id: nil, email_address: nil, private_key: nil, http_options: {open_timeout: 60})
+    def initialize(default_project_id: nil, email_address: nil, private_key: nil, http_options: {timeout: 60}, default_retries: 5)
       @default_project_id = default_project_id
       @scope = "https://www.googleapis.com/auth/bigquery"
       @email_address = email_address
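
The constructor's HTTP options key changes from :open_timeout to :timeout, and a retry count can now be set up front. A usage sketch with placeholder credentials (the project id, service-account email, and key path are not from this diff):

    require "openssl"
    require "kura"

    client = Kura::Client.new(
      default_project_id: "example-project",
      email_address: "svc@example-project.iam.gserviceaccount.com",
      private_key: OpenSSL::PKey::RSA.new(File.read("service_account.pem")),
      http_options: { timeout: 120 },  # 0.1.x used { open_timeout: 60 }
      default_retries: 3)              # new in 0.2.0, applied via RequestOptions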
@@ -17,140 +18,116 @@ module Kura
           scope: @scope,
           issuer: @email_address,
           signing_key: @private_key)
+        # MEMO: signet-0.6.1 depends on Faraday.default_connection
+        Faraday.default_connection.options.timeout = 60
+        auth.fetch_access_token!
       else
-        auth = Google::APIClient::ComputeServiceAccount.new
+        auth = Google::Auth.get_application_default([@scope])
+        auth.fetch_access_token!
       end
-      @api = Google::APIClient.new(application_name: "Kura", application_version: Kura::VERSION, authorization: auth, faraday_option: http_options)
-      @api.authorization.fetch_access_token!
-      @bigquery_api = @api.discovered_api("bigquery", "v2")
+      Google::Apis::RequestOptions.default.retries = default_retries
+      Google::Apis::RequestOptions.default.timeout_sec = http_options[:timeout]
+      @api = Google::Apis::BigqueryV2::BigqueryService.new
+      @api.authorization = auth
 
       if @default_project_id.nil?
         @default_project_id = self.projects.first.id
       end
     end
 
-    def projects(limit: 1000)
-      r = @api.execute(api_method: @bigquery_api.projects.list, parameters: { maxResults: limit })
-      unless r.success?
-        error = r.data["error"]["errors"][0]
+    def process_error(err)
+      if err.respond_to?(:body)
+        jobj = JSON.parse(err.body)
+        error = jobj["error"]
+        error = error["errors"][0]
         raise Kura::ApiError.new(error["reason"], error["message"])
+      else
+        raise err
       end
-      r.data.projects
+    end
+    private :process_error
+
+    def projects(limit: 1000)
+      result = @api.list_projects(max_results: limit)
+      result.projects
+    rescue
+      process_error($!)
     end
 
     def datasets(project_id: @default_project_id, all: false, limit: 1000)
-      r = @api.execute(api_method: @bigquery_api.datasets.list, parameters: { projectId: project_id, all: all, maxResult: limit })
-      unless r.success?
-        error = r.data["error"]["errors"][0]
-        raise Kura::ApiError.new(error["reason"], error["message"])
-      end
-      r.data.datasets
+      result = @api.list_datasets(project_id, all: all, max_results: limit)
+      result.datasets
+    rescue
+      process_error($!)
     end
 
     def dataset(dataset_id, project_id: @default_project_id)
-      r = @api.execute(api_method: @bigquery_api.datasets.get, parameters: { projectId: project_id, datasetId: dataset_id })
-      unless r.success?
-        if r.data.error["code"] == 404
-          return nil
-        else
-          error = r.data["error"]["errors"][0]
-          raise Kura::ApiError.new(error["reason"], error["message"])
-        end
-      end
-      r.data
+      @api.get_dataset(project_id, dataset_id)
+    rescue
+      return nil if $!.respond_to?(:status_code) and $!.status_code == 404
+      process_error($!)
     end
 
     def insert_dataset(dataset_id, project_id: @default_project_id)
-      r = @api.execute(api_method: @bigquery_api.datasets.insert, parameters: { projectId: project_id }, body_object: { datasetReference: { datasetId: dataset_id } })
-      unless r.success?
-        error = r.data["error"]["errors"][0]
-        raise Kura::ApiError.new(error["reason"], error["message"])
-      end
-      r.data
+      obj = Google::Apis::BigqueryV2::Dataset.new(dataset_reference: Google::Apis::BigqueryV2::DatasetReference.new(project_id: project_id, dataset_id: dataset_id))
+      @api.insert_dataset(project_id, obj)
+    rescue
+      process_error($!)
     end
 
     def delete_dataset(dataset_id, project_id: @default_project_id, delete_contents: false)
-      r = @api.execute(api_method: @bigquery_api.datasets.delete, parameters: { projectId: project_id, datasetId: dataset_id, deleteContents: delete_contents })
-      unless r.success?
-        error = r.data["error"]["errors"][0]
-        raise Kura::ApiError.new(error["reason"], error["message"])
-      end
-      r.data
+      @api.delete_dataset(project_id, dataset_id, delete_contents: delete_contents)
+    rescue
+      return nil if $!.respond_to?(:status_code) and $!.status_code == 404
+      process_error($!)
     end
 
     def patch_dataset(dataset_id, project_id: @default_project_id, access: nil, description: nil, default_table_expiration_ms: nil, friendly_name: nil )
-      body = {}
-      body["access"] = access if access
-      body["defaultTableExpirationMs"] = default_table_expiration_ms if default_table_expiration_ms
-      body["description"] = description if description
-      body["friendlyName"] = friendly_name if friendly_name
-      r = @api.execute(api_method: @bigquery_api.datasets.patch, parameters: { projectId: project_id, datasetId: dataset_id }, body_object: body)
-      unless r.success?
-        error = r.data["error"]["errors"][0]
-        raise Kura::ApiError.new(error["reason"], error["message"])
-      end
-      r.data
+      obj = Google::Apis::BigqueryV2::Dataset.new(dataset_reference: Google::Apis::BigqueryV2::DatasetReference.new(project_id: project_id, dataset_id: dataset_id))
+      obj.access = access if access
+      obj.default_table_expiration_ms = default_table_expiration_ms if default_table_expiration_ms
+      obj.description = description if description
+      obj.friendly_name = friendly_name if friendly_name
+      @api.patch_dataset(project_id, dataset_id, obj)
+    rescue
+      process_error($!)
     end
 
     def tables(dataset_id, project_id: @default_project_id, limit: 1000)
-      params = { projectId: project_id, datasetId: dataset_id, maxResult: limit }
-      r = @api.execute(api_method: @bigquery_api.tables.list, parameters: params)
-      unless r.success?
-        error = r.data["error"]["errors"][0]
-        raise Kura::ApiError.new(error["reason"], error["message"])
-      end
-      r.data.tables
+      result = @api.list_tables(project_id, dataset_id, max_results: limit)
+      result.tables
+    rescue
+      process_error($!)
     end
 
     def table(dataset_id, table_id, project_id: @default_project_id)
-      params = { projectId: project_id, datasetId: dataset_id, tableId: table_id }
-      r = @api.execute(api_method: @bigquery_api.tables.get, parameters: params)
-      unless r.success?
-        if r.data["error"]["code"] == 404
-          return nil
-        else
-          error = r.data["error"]["errors"][0]
-          raise Kura::ApiError.new(error["reason"], error["message"])
-        end
-      end
-      r.data
+      @api.get_table(project_id, dataset_id, table_id)
+    rescue
+      return nil if $!.respond_to?(:status_code) and $!.status_code == 404
+      process_error($!)
     end
 
     def delete_table(dataset_id, table_id, project_id: @default_project_id)
-      params = { projectId: project_id, datasetId: dataset_id, tableId: table_id }
-      r = @api.execute(api_method: @bigquery_api.tables.delete, parameters: params)
-      unless r.success?
-        if r.data["error"]["code"] == 404
-          return nil
-        else
-          error = r.data["error"]["errors"][0]
-          raise Kura::ApiError.new(error["reason"], error["message"])
-        end
-      end
-      r.data
+      @api.delete_table(project_id, dataset_id, table_id)
+    rescue
+      return nil if $!.respond_to?(:status_code) and $!.status_code == 404
+      process_error($!)
     end
 
     def list_tabledata(dataset_id, table_id, project_id: @default_project_id, start_index: 0, max_result: 100, page_token: nil, schema: nil)
       schema ||= table(dataset_id, table_id, project_id: project_id).schema.fields
-      field_names = schema.map{|f| f["name"] }
-      params = { projectId: project_id, datasetId: dataset_id, tableId: table_id, maxResults: max_result }
-      if page_token
-        params[:pageToken] = page_token
-      else
-        params[:startIndex] = start_index
-      end
-      r = @api.execute(api_method: @bigquery_api.tabledata.list, parameters: params)
-      unless r.success?
-        error = r.data["error"]["errors"][0]
-        raise Kura::ApiError.new(error["reason"], error["message"])
-      end
+      field_names = schema.map{|f| f.respond_to?(:[]) ? (f["name"] || f[:name]) : f.name }
+
+      r = @api.list_table_data(project_id, dataset_id, table_id, max_results: max_result, start_index: start_index, page_token: page_token)
       {
-        total_rows: r.data.totalRows,
-        next_token: r.data["pageToken"],
-        rows: r.data.rows.map do |row|
+        total_rows: r.total_rows.to_i,
+        next_token: r.page_token,
+        rows: r.rows.map do |row|
           row.f.zip(field_names).each_with_object({}) do |(v, fn), tbl| tbl[fn] = v.v end
         end
       }
+    rescue
+      process_error($!)
     end
 
     def mode_to_write_disposition(mode)
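
Every wrapper now calls the generated BigqueryService directly and funnels failures into process_error, which re-raises Google::Apis errors carrying a JSON body as Kura::ApiError; the get/delete helpers return nil on HTTP 404. A sketch of the caller's view (names are placeholders; it assumes Kura::ApiError keeps the reason/message pair it is constructed with, and that list results use the generated snake_case accessors):

    begin
      client.dataset("no_such_dataset")         # => nil on HTTP 404, no raise
      client.tables("my_dataset").each do |t|
        puts t.table_reference.table_id         # generated BigqueryV2 objects
      end
    rescue Kura::ApiError => e
      warn e.message  # reason/message parsed out of the JSON error body
    end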
@@ -162,21 +139,17 @@ module Kura
     private :mode_to_write_disposition
 
     def insert_job(configuration, project_id: @default_project_id, media: nil, wait: nil)
-      params = { projectId: project_id }
-      if media
-        params["uploadType"] = "multipart"
-      end
-      body = { configuration: configuration }
-      r = @api.execute(api_method: @bigquery_api.jobs.insert, parameters: params, body_object: body, media: media)
-      unless r.success?
-        error = r.data["error"]["errors"][0]
-        raise Kura::ApiError.new(error["reason"], error["message"])
-      end
+      job_object = Google::Apis::BigqueryV2::Job.new
+      job_object.configuration = configuration
+      result = @api.insert_job(project_id, job_object, upload_source: media)
+      job_id = result.job_reference.job_id
       if wait
-        wait_job(r.data.jobReference.jobId, wait, project_id: project_id)
+        wait_job(job_id, wait, project_id: project_id)
       else
-        r.data.jobReference.jobId
+        job_id
       end
+    rescue
+      process_error($!)
     end
 
     def query(sql, mode: :truncate,
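
insert_job now wraps the configuration in a Google::Apis::BigqueryV2::Job, passes any local upload through upload_source, and keeps the 0.1.x contract: return the job id, or block via wait_job (unchanged, not shown in this diff) when wait is given. For example (the SQL is a placeholder):

    configuration = { query: { query: "SELECT 1" } }

    job_id = client.insert_job(configuration)    # returns the new job id
    client.insert_job(configuration, wait: 60)   # polls via wait_job; job_finished?
                                                 # below raises Kura::ApiError on failure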
@@ -193,19 +166,45 @@ module Kura
       configuration = {
         query: {
           query: sql,
-          writeDisposition: write_disposition,
-          allowLargeResults: allow_large_results,
-          flattenResults: flatten_results,
+          write_disposition: write_disposition,
+          allow_large_results: allow_large_results,
+          flatten_results: flatten_results,
           priority: priority,
-          useQueryCache: use_query_cache,
+          use_query_cache: use_query_cache,
         }
       }
       if dataset_id and table_id
-        configuration[:query][:destinationTable] = { projectId: project_id, datasetId: dataset_id, tableId: table_id }
+        configuration[:query][:destination_table] = { project_id: project_id, dataset_id: dataset_id, table_id: table_id }
       end
       insert_job(configuration, wait: wait, project_id: job_project_id)
     end
 
+    def normalize_schema(schema)
+      schema.map do |s|
+        if s.respond_to?(:[])
+          f = {
+            name: (s[:name] || s["name"]),
+            type: (s[:type] || s["type"]),
+            mode: (s[:mode] || s["mode"]),
+          }
+          if (sub_fields = (s[:fields] || s["fields"]))
+            f[:fields] = normalize_schema(sub_fields)
+          end
+        else
+          f = {
+            name: s.name,
+            type: s.type,
+            mode: s.mode,
+          }
+          if (sub_fields = s.fields)
+            f[:fields] = normalize_schema(sub_fields)
+          end
+        end
+        f
+      end
+    end
+    private :normalize_schema
+
     def load(dataset_id, table_id, source_uris=nil,
              schema: nil, delimiter: ",", field_delimiter: delimiter, mode: :append,
              allow_jagged_rows: false, max_bad_records: 0,
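
Query configuration keys move to snake_case, and the new normalize_schema helper (used by load below) accepts schema fields as hashes with symbol or string keys, or as generated TableFieldSchema objects, recursing into nested RECORD fields. All three spellings here normalize to the same structure:

    schema = [
      { name: "id", type: "INTEGER", mode: "REQUIRED" },   # symbol keys
      { "name" => "ts", "type" => "TIMESTAMP" },           # string keys
      { name: "payload", type: "RECORD", mode: "NULLABLE",
        fields: [{ name: "body", type: "STRING" }] },      # nested RECORD
    ]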
@@ -220,31 +219,29 @@ module Kura
       source_uris = [source_uris] if source_uris.is_a?(String)
       configuration = {
         load: {
-          destinationTable: {
-            projectId: project_id,
-            datasetId: dataset_id,
-            tableId: table_id,
+          destination_table: {
+            project_id: project_id,
+            dataset_id: dataset_id,
+            table_id: table_id,
           },
-          writeDisposition: write_disposition,
-          allowJaggedRows: allow_jagged_rows,
-          maxBadRecords: max_bad_records,
-          ignoreUnknownValues: ignore_unknown_values,
-          sourceFormat: source_format,
+          write_disposition: write_disposition,
+          allow_jagged_rows: allow_jagged_rows,
+          max_bad_records: max_bad_records,
+          ignore_unknown_values: ignore_unknown_values,
+          source_format: source_format,
         }
       }
       if schema
-        configuration[:load][:schema] = { fields: schema }
+        configuration[:load][:schema] = { fields: normalize_schema(schema) }
       end
       if source_format == "CSV"
-        configuration[:load][:fieldDelimiter] = field_delimiter
-        configuration[:load][:allowQuotedNewlines] = allow_quoted_newlines
+        configuration[:load][:field_delimiter] = field_delimiter
+        configuration[:load][:allow_quoted_newlines] = allow_quoted_newlines
         configuration[:load][:quote] = quote
-        configuration[:load][:skipLeadingRows] = skip_leading_rows
+        configuration[:load][:skip_leading_rows] = skip_leading_rows
       end
-      if file
-        file = Google::APIClient::UploadIO.new(file, "application/octet-stream")
-      else
-        configuration[:load][:sourceUris] = source_uris
+      unless file
+        configuration[:load][:source_uris] = source_uris
       end
       insert_job(configuration, media: file, wait: wait, project_id: job_project_id)
     end
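
load keeps its keyword interface but emits snake_case configuration keys, runs any schema through normalize_schema, and hands a local file directly to insert_job as the upload source instead of wrapping it in UploadIO. Two sketches, assuming the truncated signature includes the file: and skip_leading_rows: keywords its body references (bucket and file names are placeholders):

    # From Google Cloud Storage:
    client.load("my_dataset", "events", "gs://my-bucket/events-*.csv",
                schema: schema, mode: :append, skip_leading_rows: 1, wait: 300)

    # From a local file, uploaded with the job:
    File.open("events.csv") do |f|
      client.load("my_dataset", "events", schema: schema, file: f, wait: 300)
    end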
@@ -261,18 +258,18 @@ module Kura
       configuration = {
         extract: {
           compression: compression,
-          destinationFormat: destination_format,
-          sourceTable: {
-            projectId: project_id,
-            datasetId: dataset_id,
-            tableId: table_id,
+          destination_format: destination_format,
+          source_table: {
+            project_id: project_id,
+            dataset_id: dataset_id,
+            table_id: table_id,
           },
-          destinationUris: dest_uris,
+          destination_uris: dest_uris,
         }
       }
       if destination_format == "CSV"
-        configuration[:extract][:fieldDelimiter] = field_delimiter
-        configuration[:extract][:printHeader] = print_header
+        configuration[:extract][:field_delimiter] = field_delimiter
+        configuration[:extract][:print_header] = print_header
       end
       insert_job(configuration, wait: wait, project_id: job_project_id)
     end
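
The extract configuration gets the same snake_case rename. Assuming the surrounding method is the extract-job helper implied by the compression, destination_format, field_delimiter, and print_header parameters, usage would look like this (the URIs are placeholders):

    client.extract("my_dataset", "events", "gs://my-bucket/dump/events-*.csv.gz",
                   compression: "GZIP",
                   destination_format: "CSV",   # enables the CSV-only options
                   print_header: true,
                   wait: 600)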
@@ -286,36 +283,32 @@ module Kura
       write_disposition = mode_to_write_disposition(mode)
       configuration = {
         copy: {
-          destinationTable: {
-            projectId: dest_project_id,
-            datasetId: dest_dataset_id,
-            tableId: dest_table_id,
+          destination_table: {
+            project_id: dest_project_id,
+            dataset_id: dest_dataset_id,
+            table_id: dest_table_id,
           },
-          sourceTable: {
-            projectId: src_project_id,
-            datasetId: src_dataset_id,
-            tableId: src_table_id,
+          source_table: {
+            project_id: src_project_id,
+            dataset_id: src_dataset_id,
+            table_id: src_table_id,
           },
-          writeDisposition: write_disposition,
+          write_disposition: write_disposition,
         }
       }
       insert_job(configuration, wait: wait, project_id: job_project_id)
     end
 
     def job(job_id, project_id: @default_project_id)
-      params = { projectId: project_id, jobId: job_id }
-      r = @api.execute(api_method: @bigquery_api.jobs.get, parameters: params)
-      unless r.success?
-        error = r.data["error"]["errors"][0]
-        raise Kura::ApiError.new(error["reason"], error["message"])
-      end
-      r.data
+      @api.get_job(project_id, job_id)
+    rescue
+      process_error($!)
     end
 
     def job_finished?(r)
       if r.status.state == "DONE"
-        if r.status["errorResult"]
-          raise Kura::ApiError.new(r.status.errorResult.reason, r.status.errorResult.message)
+        if r.status.error_result
+          raise Kura::ApiError.new(r.status.error_result.reason, r.status.error_result.message)
         end
         return true
       end
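
copy, job, and job_finished? complete the pattern: snake_case hash keys, direct service calls, and generated accessors (r.status.error_result instead of r.status["errorResult"]). A polling sketch, assuming the surrounding method is the table-copy helper implied by the src_/dest_ parameter names:

    job_id = client.copy("src_dataset", "src_table",
                         "dest_dataset", "dest_table", mode: :truncate)
    until client.job_finished?(client.job(job_id))  # raises Kura::ApiError when
      sleep 5                                       # status.error_result is set
    end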
data/lib/kura/version.rb CHANGED
@@ -1,3 +1,3 @@
 module Kura
-  VERSION = "0.1.5"
+  VERSION = "0.2.0"
 end
data/lib/kura.rb CHANGED
@@ -34,7 +34,7 @@ module Kura
     private_key
   end
 
-  def self.client(project_id=nil, email_address=nil, private_key=nil, http_options: {open_timeout: 60})
+  def self.client(project_id=nil, email_address=nil, private_key=nil, http_options: {timeout: 60})
     if private_key
       private_key = get_private_key(private_key)
     end
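
The module-level helper changes in lockstep: only the default http_options key moves from :open_timeout to :timeout. With no credentials at all, 0.2.0 falls back to Google::Auth.get_application_default inside Kura::Client#initialize, so this minimal sketch works on a machine with application default credentials (the project id is a placeholder):

    require "kura"

    client = Kura.client("example-project", nil, nil,
                         http_options: { timeout: 90 })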
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: kura
 version: !ruby/object:Gem::Version
-  version: 0.1.5
+  version: 0.2.0
 platform: ruby
 authors:
 - Chikanaga Tomoyuki
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2015-08-13 00:00:00.000000000 Z
+date: 2015-08-25 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: google-api-client
@@ -16,14 +16,14 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 0.8.5
+        version: 0.9.pre3
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 0.8.5
+        version: 0.9.pre3
 - !ruby/object:Gem::Dependency
   name: bundler
   requirement: !ruby/object:Gem::Requirement
@@ -147,7 +147,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
       version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.4.5
+rubygems_version: 2.4.5.1
 signing_key:
 specification_version: 4
 summary: Interface to BigQuery API v2.