kura 0.1.2 → 0.1.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: d152cd817d2c2b1d5ca66756310e5af0a4a1d338
- data.tar.gz: a18b967e4fec4f97b1d22937590ef5e9866f4f96
+ metadata.gz: 80adcab6f4a5fd5ed60452a26121bbc13ecd491a
+ data.tar.gz: 51431b6a1f57e9f01615b3af4b513652c155aeed
  SHA512:
- metadata.gz: a834c80e7c6eec7e1aea2c32a37616e4adbbdfc44509df5f49c3e5859eedce046d69b256d3919d1fb38267de61734e59f496a52a35a3910a7eebcfb582b8f180
- data.tar.gz: dd055673db99ecd8c38bf2810c7d44c53ee449292e08bd0a53206bc3a9072b372bc1bdf3accc2e5ed15b48b266e350df38af60891b0758487004891b45be815d
+ metadata.gz: 77490104632b871a6f15f2ff034263b1a45379ca07a5412c6c83556c4a6993bea17f172c885e02201faa466068872d10c0d8d788d2056d1f54fa523cf98ead0f
+ data.tar.gz: 0246eb96056d65d30e7b2473640d06ca713936725bd5350d607340be2e09afda189e85cc7f0b23aa37e07514f1bbb2b2e1588500e9cdf050cf72f38737d824c7
data/ChangeLog.md CHANGED
@@ -1,3 +1,11 @@
+ # 0.1.3
+
+ ## Enhancements
+
+ * Add Kura::Client#projects support projects.list API.
+ * All APIs accept `project_id` keyword argument to override @default_project_id.
+ * Get @default_project_id by projects.list API if not specified by the argument.
+
  # 0.1.2

  ## Incompatible Changes
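The 0.1.3 enhancements above come down to one new reader method and one new keyword: `Kura::Client#projects` wraps the BigQuery projects.list API, and client methods accept `project_id:` to override `@default_project_id`. A minimal usage sketch, reusing the `email`/`private_key` placeholders from the README; the nil project shortcut and the name `"other-project"` are illustrative only:

```ruby
require "kura"

# With no project_id given, 0.1.3 falls back to the first project
# returned by projects.list as @default_project_id.
client = Kura.client(nil, email, private_key)

# New in 0.1.3: list the projects visible to these credentials.
client.projects.each { |project| puts project.id }

# Any API can now be pointed at another project per call.
client.datasets(project_id: "other-project")
```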
data/README.md CHANGED
@@ -24,7 +24,7 @@ Or install it yourself as:
  ```
  client = Kura.client(project_id, email, private_key)
  client.load("dataset", "table", "gs://mybucket/data.csv", wait: 120)
- client.query("dataset", "result", "SELECT * FROM [dataset.table];", wait: 120)
+ client.query("SELECT * FROM [dataset.table];", wait: 120)
  client.extract("dataset", "result", "gs://mybucket/extracted.csv", wait: 120)
  ```

data/lib/kura/client.rb CHANGED
@@ -6,7 +6,7 @@ require "kura/version"
  module Kura
  class Client
  def initialize(project_id, email_address, private_key)
- @project_id = project_id
+ @default_project_id = project_id
  @scope = "https://www.googleapis.com/auth/bigquery"
  @email_address = email_address
  @private_key = private_key
@@ -19,9 +19,22 @@ module Kura
  @api = Google::APIClient.new(application_name: "Kura", application_version: Kura::VERSION, authorization: auth)
  @api.authorization.fetch_access_token!
  @bigquery_api = @api.discovered_api("bigquery", "v2")
+
+ if @default_project_id.nil?
+ @default_project_id = self.projects.first.id
+ end
  end

- def datasets(project_id: @project_id, all: false, limit: 1000)
+ def projects(limit: 1000)
+ r = @api.execute(api_method: @bigquery_api.projects.list, parameters: { maxResults: limit })
+ unless r.success?
+ error = r.data["error"]["errors"][0]
+ raise Kura::ApiError.new(error["reason"], error["message"])
+ end
+ r.data.projects
+ end
+
+ def datasets(project_id: @default_project_id, all: false, limit: 1000)
  r = @api.execute(api_method: @bigquery_api.datasets.list, parameters: { projectId: project_id, all: all, maxResult: limit })
  unless r.success?
  error = r.data["error"]["errors"][0]
@@ -30,7 +43,7 @@ module Kura
  r.data.datasets
  end

- def dataset(dataset_id, project_id: @project_id)
+ def dataset(dataset_id, project_id: @default_project_id)
  r = @api.execute(api_method: @bigquery_api.datasets.get, parameters: { projectId: project_id, datasetId: dataset_id })
  unless r.success?
  if r.data.error["code"] == 404
@@ -43,8 +56,8 @@ module Kura
  r.data
  end

- def insert_dataset(dataset_id)
- r = @api.execute(api_method: @bigquery_api.datasets.insert, parameters: { projectId: @project_id }, body_object: { datasetReference: { datasetId: dataset_id } })
+ def insert_dataset(dataset_id, project_id: @default_project_id)
+ r = @api.execute(api_method: @bigquery_api.datasets.insert, parameters: { projectId: project_id }, body_object: { datasetReference: { datasetId: dataset_id } })
  unless r.success?
  error = r.data["error"]["errors"][0]
  raise Kura::ApiError.new(error["reason"], error["message"])
@@ -52,8 +65,8 @@ module Kura
  r.data
  end

- def delete_dataset(dataset_id, delete_contents: false)
- r = @api.execute(api_method: @bigquery_api.datasets.delete, parameters: { projectId: @project_id, datasetId: dataset_id, deleteContents: delete_contents })
+ def delete_dataset(dataset_id, project_id: @default_project_id, delete_contents: false)
+ r = @api.execute(api_method: @bigquery_api.datasets.delete, parameters: { projectId: project_id, datasetId: dataset_id, deleteContents: delete_contents })
  unless r.success?
  error = r.data["error"]["errors"][0]
  raise Kura::ApiError.new(error["reason"], error["message"])
@@ -61,7 +74,7 @@ module Kura
  r.data
  end

- def patch_dataset(dataset_id, project_id: @project_id, access: nil, description: nil, default_table_expiration_ms: nil, friendly_name: nil )
+ def patch_dataset(dataset_id, project_id: @default_project_id, access: nil, description: nil, default_table_expiration_ms: nil, friendly_name: nil )
  body = {}
  body["access"] = access if access
  body["defaultTableExpirationMs"] = default_table_expiration_ms if default_table_expiration_ms
@@ -75,7 +88,7 @@ module Kura
  r.data
  end

- def tables(dataset_id, project_id: @project_id, limit: 1000)
+ def tables(dataset_id, project_id: @default_project_id, limit: 1000)
  params = { projectId: project_id, datasetId: dataset_id, maxResult: limit }
  r = @api.execute(api_method: @bigquery_api.tables.list, parameters: params)
  unless r.success?
@@ -85,7 +98,7 @@ module Kura
  r.data.tables
  end

- def table(dataset_id, table_id, project_id: @project_id)
+ def table(dataset_id, table_id, project_id: @default_project_id)
  params = { projectId: project_id, datasetId: dataset_id, tableId: table_id }
  r = @api.execute(api_method: @bigquery_api.tables.get, parameters: params)
  unless r.success?
@@ -99,8 +112,8 @@ module Kura
  r.data
  end

- def delete_table(dataset_id, table_id)
- params = { projectId: @project_id, datasetId: dataset_id, tableId: table_id }
+ def delete_table(dataset_id, table_id, project_id: @default_project_id)
+ params = { projectId: project_id, datasetId: dataset_id, tableId: table_id }
  r = @api.execute(api_method: @bigquery_api.tables.delete, parameters: params)
  unless r.success?
  if r.data["error"]["code"] == 404
@@ -113,7 +126,7 @@ module Kura
  r.data
  end

- def list_tabledata(dataset_id, table_id, project_id: @project_id, start_index: 0, max_result: 100, page_token: nil, schema: nil)
+ def list_tabledata(dataset_id, table_id, project_id: @default_project_id, start_index: 0, max_result: 100, page_token: nil, schema: nil)
  schema ||= table(dataset_id, table_id, project_id: project_id).schema.fields
  field_names = schema.map{|f| f["name"] }
  params = { projectId: project_id, datasetId: dataset_id, tableId: table_id, maxResults: max_result }
@@ -144,8 +157,8 @@ module Kura
  end
  private :mode_to_write_disposition

- def insert_job(configuration, media: nil, wait: nil)
- params = { projectId: @project_id }
+ def insert_job(configuration, project_id: @default_project_id, media: nil, wait: nil)
+ params = { projectId: project_id }
  if media
  params["uploadType"] = "multipart"
  end
@@ -156,7 +169,7 @@ module Kura
  raise Kura::ApiError.new(error["reason"], error["message"])
  end
  if wait
- wait_job(r.data.jobReference.jobId, wait)
+ wait_job(r.data.jobReference.jobId, wait, project_id: project_id)
  else
  r.data.jobReference.jobId
  end
@@ -169,6 +182,8 @@ module Kura
  flatten_results: true,
  priority: "INTERACTIVE",
  use_query_cache: true,
+ project_id: @default_project_id,
+ job_project_id: @default_project_id,
  wait: nil)
  write_disposition = mode_to_write_disposition(mode)
  configuration = {
@@ -182,9 +197,9 @@ module Kura
  }
  }
  if dataset_id and table_id
- configuration[:query][:destinationTable] = { projectId: @project_id, datasetId: dataset_id, tableId: table_id }
+ configuration[:query][:destinationTable] = { projectId: project_id, datasetId: dataset_id, tableId: table_id }
  end
- insert_job(configuration, wait: wait)
+ insert_job(configuration, wait: wait, project_id: job_project_id)
  end

  def load(dataset_id, table_id, source_uris=nil,
@@ -194,13 +209,15 @@ module Kura
  allow_quoted_newlines: false,
  quote: '"', skip_leading_rows: 0,
  source_format: "CSV",
+ project_id: @default_project_id,
+ job_project_id: @default_project_id,
  file: nil, wait: nil)
  write_disposition = mode_to_write_disposition(mode)
  source_uris = [source_uris] if source_uris.is_a?(String)
  configuration = {
  load: {
  destinationTable: {
- projectId: @project_id,
+ projectId: project_id,
  datasetId: dataset_id,
  tableId: table_id,
  },
@@ -225,7 +242,7 @@ module Kura
  else
  configuration[:load][:sourceUris] = source_uris
  end
- insert_job(configuration, media: file, wait: wait)
+ insert_job(configuration, media: file, wait: wait, project_id: job_project_id)
  end

  def extract(dataset_id, table_id, dest_uris,
@@ -233,6 +250,8 @@ module Kura
  destination_format: "CSV",
  field_delimiter: ",",
  print_header: true,
+ project_id: @default_project_id,
+ job_project_id: @default_project_id,
  wait: nil)
  dest_uris = [ dest_uris ] if dest_uris.is_a?(String)
  configuration = {
@@ -240,7 +259,7 @@ module Kura
  compression: compression,
  destinationFormat: destination_format,
  sourceTable: {
- projectId: @project_id,
+ projectId: project_id,
  datasetId: dataset_id,
  tableId: table_id,
  },
@@ -251,31 +270,36 @@ module Kura
  configuration[:extract][:fieldDelimiter] = field_delimiter
  configuration[:extract][:printHeader] = print_header
  end
- insert_job(configuration, wait: wait)
+ insert_job(configuration, wait: wait, project_id: job_project_id)
  end

- def copy(src_dataset_id, src_table_id, dest_dataset_id, dest_table_id, mode: :truncate, wait: nil)
+ def copy(src_dataset_id, src_table_id, dest_dataset_id, dest_table_id,
+ mode: :truncate,
+ src_project_id: @default_project_id,
+ dest_project_id: @default_project_id,
+ job_project_id: @default_project_id,
+ wait: nil)
  write_disposition = mode_to_write_disposition(mode)
  configuration = {
  copy: {
  destinationTable: {
- projectId: @project_id,
+ projectId: dest_project_id,
  datasetId: dest_dataset_id,
  tableId: dest_table_id,
  },
  sourceTable: {
- projectId: @project_id,
+ projectId: src_project_id,
  datasetId: src_dataset_id,
  tableId: src_table_id,
  },
  writeDisposition: write_disposition,
  }
  }
- insert_job(configuration, wait: wait)
+ insert_job(configuration, wait: wait, project_id: job_project_id)
  end

- def job(job_id)
- params = { projectId: @project_id, jobId: job_id }
+ def job(job_id, project_id: @default_project_id)
+ params = { projectId: project_id, jobId: job_id }
  r = @api.execute(api_method: @bigquery_api.jobs.get, parameters: params)
  unless r.success?
  error = r.data["error"]["errors"][0]
@@ -294,10 +318,10 @@ module Kura
  return false
  end

- def wait_job(job_id, timeout=60*10)
+ def wait_job(job_id, timeout=60*10, project_id: @default_project_id)
  expire = Time.now + timeout
  while expire > Time.now
- j = job(job_id)
+ j = job(job_id, project_id: project_id)
  if job_finished?(j)
  return j
  end
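With these changes, the job-issuing methods (`query`, `load`, `extract`, `copy`) route the job itself through `job_project_id:` and their source/destination tables through `project_id:` (or `src_project_id:`/`dest_project_id:` in `copy`), all defaulting to `@default_project_id`. A sketch of a cross-project copy, assuming the `client` from the README example; every project and dataset name below is made up for illustration:

```ruby
# Copy a table from one project into another, while the copy job
# itself runs under a third (billing) project.
client.copy("src_dataset", "src_table", "dest_dataset", "dest_table",
            src_project_id: "analytics-prod",
            dest_project_id: "analytics-staging",
            job_project_id: "billing-project",
            wait: 600)
```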
data/lib/kura/version.rb CHANGED
@@ -1,3 +1,3 @@
  module Kura
- VERSION = "0.1.2"
+ VERSION = "0.1.3"
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: kura
  version: !ruby/object:Gem::Version
- version: 0.1.2
+ version: 0.1.3
  platform: ruby
  authors:
  - Chikanaga Tomoyuki
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2015-07-22 00:00:00.000000000 Z
+ date: 2015-07-31 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: google-api-client