kura 0.2.8 → 0.2.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/ChangeLog.md +15 -0
- data/lib/kura/client.rb +53 -12
- data/lib/kura/version.rb +1 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f925ebb59635bd1c2cd732e5389e514281f56321
+  data.tar.gz: 2851191f428d93430007ce36232e05406fbce9d7
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 6e9234b9455f0e2c73e71803460e7d7a498495f026fc196c469ae899d3f3a6a44d1dcde8e8d0e5f093c629ca2bb53015274772cdd531693c03137cd20557fddc
+  data.tar.gz: db5cfa26b39f15a909d18b789a9f459e4f4d87f5fa6010eb655969668896e181d3ccacd70d801ee220fbc280780f6d1708c16306c073b9d6c4aa0be9f8ae2c6b
data/ChangeLog.md
CHANGED
@@ -1,3 +1,18 @@
+# 0.2.9
+
+## Enhancements
+
+* Add keyword argument `job_id` to query/load/extract/copy.
+  You can generate unique jobId on client side and pass it to get rid of duplicated
+  job insertion at network failure.
+  see https://cloud.google.com/bigquery/docs/managing_jobs_datasets_projects#generate-jobid
+* Add keyword argument `dry_run` to query/load/extract/copy.
+
+## Fixes
+
+* Add workaround of a bug in google-api-client-0.9.pre4.
+  see https://github.com/google/google-api-ruby-client/issues/326
+
 # 0.2.8
 
 ## Fixes
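A minimal usage sketch of the two new keyword arguments. The client construction and the dataset/table names below are illustrative; only the job_id and dry_run options come from this release.

    require "kura"
    require "securerandom"

    # Construction is illustrative; use whatever credentials setup you already have.
    client = Kura.client("path/to/service_account.json")

    # Client-generated job ID: retrying the insert with the same ID cannot create
    # a second, duplicated job if the first request already reached BigQuery.
    client.query("SELECT COUNT(*) FROM [mydataset.mytable]",
                 dataset_id: "mydataset", table_id: "result",
                 job_id: "kura-#{SecureRandom.uuid}")

    # dry_run validates the query without executing it (waiting is skipped).
    client.query("SELECT COUNT(*) FROM [mydataset.mytable]", dry_run: true)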
data/lib/kura/client.rb
CHANGED
@@ -1,5 +1,6 @@
 # coding: utf-8
 
+require "json"
 require "google/apis/bigquery_v2"
 require "googleauth"
 require "kura/version"
@@ -36,6 +37,15 @@ module Kura
       end
     end
 
+    def normalize_parameter(v)
+      case v
+      when nil
+        nil
+      else
+        v.to_s
+      end
+    end
+
     def process_error(err)
       if err.respond_to?(:body)
         begin
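The new normalize_parameter helper is the workaround mentioned in the ChangeLog Fixes entry: option values (booleans in particular) are converted to strings before they are handed to google-api-client-0.9.pre4, while nil is passed through untouched. A short sketch of the resulting behavior, assuming the helper is called as in the hunks below:

    normalize_parameter(nil)    # => nil
    normalize_parameter(false)  # => "false"
    normalize_parameter(true)   # => "true"
    normalize_parameter(1000)   # => "1000"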
@@ -80,6 +90,7 @@ module Kura
     end
 
     def datasets(project_id: @default_project_id, all: false, limit: 1000, &blk)
+      all = normalize_parameter(all)
       if blk
         @api.list_datasets(project_id, all: all, max_results: limit) do |result, err|
           result &&= result.datasets
@@ -118,6 +129,7 @@ module Kura
     end
 
     def delete_dataset(dataset_id, project_id: @default_project_id, delete_contents: false, &blk)
+      delete_contents = normalize_parameter(delete_contents)
       @api.delete_dataset(project_id, dataset_id, delete_contents: delete_contents, &blk)
     rescue
       return nil if $!.respond_to?(:status_code) and $!.status_code == 404
@@ -249,9 +261,14 @@ module Kura
     end
     private :mode_to_write_disposition
 
-    def insert_job(configuration, project_id: @default_project_id, media: nil, wait: nil, &blk)
+    def insert_job(configuration, job_id: nil, project_id: @default_project_id, media: nil, wait: nil, &blk)
       job_object = Google::Apis::BigqueryV2::Job.new
       job_object.configuration = configuration
+      if job_id
+        job_object.job_reference = Google::Apis::BigqueryV2::JobReference.new
+        job_object.job_reference.project_id = project_id
+        job_object.job_reference.job_id = job_id
+      end
       job = @api.insert_job(project_id, job_object, upload_source: media)
       job.kura_api = self
       if wait
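Attaching a JobReference when job_id is given is what makes client-side IDs safe to retry: BigQuery rejects a jobs.insert whose job ID already exists, so a resubmitted request cannot start the same work twice. A hedged retry sketch, reusing the client from the sketch above; the rescued exception class and retry policy are illustrative, not part of this diff:

    require "securerandom"

    job_id = "kura-#{SecureRandom.uuid}"   # one ID per logical job
    attempts = 0
    begin
      attempts += 1
      client.query("SELECT COUNT(*) FROM [mydataset.mytable]",
                   dataset_id: "mydataset", table_id: "result", job_id: job_id)
    rescue StandardError
      # If the first attempt did reach BigQuery, the retry fails as a duplicate
      # job rather than running the query twice; inspect the error to tell the cases apart.
      retry if attempts < 3
      raise
    end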
@@ -273,19 +290,25 @@ module Kura
               user_defined_function_resources: nil,
               project_id: @default_project_id,
               job_project_id: @default_project_id,
+              job_id: nil,
               wait: nil,
+              dry_run: false,
               &blk)
       write_disposition = mode_to_write_disposition(mode)
       configuration = Google::Apis::BigqueryV2::JobConfiguration.new({
         query: Google::Apis::BigqueryV2::JobConfigurationQuery.new({
           query: sql,
           write_disposition: write_disposition,
-          allow_large_results: allow_large_results,
-          flatten_results: flatten_results,
+          allow_large_results: normalize_parameter(allow_large_results),
+          flatten_results: normalize_parameter(flatten_results),
           priority: priority,
-          use_query_cache: use_query_cache,
+          use_query_cache: normalize_parameter(use_query_cache),
         })
       })
+      if dry_run
+        configuration.dry_run = true
+        wait = nil
+      end
       if dataset_id and table_id
         configuration.query.destination_table = Google::Apis::BigqueryV2::TableReference.new({ project_id: project_id, dataset_id: dataset_id, table_id: table_id })
       end
@@ -299,7 +322,7 @@ module Kura
           end
         end
       end
-      insert_job(configuration, wait: wait, project_id: job_project_id, &blk)
+      insert_job(configuration, wait: wait, job_id: job_id, project_id: job_project_id, &blk)
     end
 
     def normalize_schema(schema)
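Because dry_run clears wait, a dry-run query returns the job object from insert_job immediately instead of polling for completion. The attribute chain below comes from google-api-client's bigquery_v2 models rather than from this diff, so treat it as an assumption:

    job = client.query("SELECT field FROM [mydataset.mytable]", dry_run: true)
    # Dry-run responses include an estimate of the bytes the query would process.
    puts job.statistics.query.total_bytes_processed if job.statistics && job.statistics.query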
@@ -337,7 +360,9 @@ module Kura
              source_format: "CSV",
              project_id: @default_project_id,
              job_project_id: @default_project_id,
+             job_id: nil,
              file: nil, wait: nil,
+             dry_run: false,
              &blk)
       write_disposition = mode_to_write_disposition(mode)
       source_uris = [source_uris] if source_uris.is_a?(String)
@@ -349,25 +374,29 @@ module Kura
             table_id: table_id,
           }),
           write_disposition: write_disposition,
-          allow_jagged_rows: allow_jagged_rows,
+          allow_jagged_rows: normalize_parameter(allow_jagged_rows),
           max_bad_records: max_bad_records,
-          ignore_unknown_values: ignore_unknown_values,
+          ignore_unknown_values: normalize_parameter(ignore_unknown_values),
           source_format: source_format,
         })
       })
+      if dry_run
+        configuration.dry_run = true
+        wait = nil
+      end
       if schema
         configuration.load.schema = Google::Apis::BigqueryV2::TableSchema.new({ fields: normalize_schema(schema) })
       end
       if source_format == "CSV"
         configuration.load.field_delimiter = field_delimiter
-        configuration.load.allow_quoted_newlines = allow_quoted_newlines
+        configuration.load.allow_quoted_newlines = normalize_parameter(allow_quoted_newlines)
         configuration.load.quote = quote
         configuration.load.skip_leading_rows = skip_leading_rows
       end
       unless file
         configuration.load.source_uris = source_uris
       end
-      insert_job(configuration, media: file, wait: wait, project_id: job_project_id, &blk)
+      insert_job(configuration, media: file, wait: wait, job_id: job_id, project_id: job_project_id, &blk)
     end
 
     def extract(dataset_id, table_id, dest_uris,
@@ -377,7 +406,9 @@ module Kura
               print_header: true,
               project_id: @default_project_id,
               job_project_id: @default_project_id,
+              job_id: nil,
               wait: nil,
+              dry_run: false,
               &blk)
       dest_uris = [ dest_uris ] if dest_uris.is_a?(String)
       configuration = Google::Apis::BigqueryV2::JobConfiguration.new({
@@ -392,11 +423,15 @@ module Kura
           destination_uris: dest_uris,
         })
       })
+      if dry_run
+        configuration.dry_run = true
+        wait = nil
+      end
       if destination_format == "CSV"
         configuration.extract.field_delimiter = field_delimiter
-        configuration.extract.print_header = print_header
+        configuration.extract.print_header = normalize_parameter(print_header)
       end
-      insert_job(configuration, wait: wait, project_id: job_project_id, &blk)
+      insert_job(configuration, wait: wait, job_id: job_id, project_id: job_project_id, &blk)
     end
 
     def copy(src_dataset_id, src_table_id, dest_dataset_id, dest_table_id,
@@ -404,7 +439,9 @@ module Kura
              src_project_id: @default_project_id,
              dest_project_id: @default_project_id,
              job_project_id: @default_project_id,
+             job_id: nil,
              wait: nil,
+             dry_run: false,
              &blk)
       write_disposition = mode_to_write_disposition(mode)
       configuration = Google::Apis::BigqueryV2::JobConfiguration.new({
@@ -422,7 +459,11 @@ module Kura
           write_disposition: write_disposition,
         })
       })
-      insert_job(configuration, wait: wait, project_id: job_project_id, &blk)
+      if dry_run
+        configuration.dry_run = true
+        wait = nil
+      end
+      insert_job(configuration, wait: wait, job_id: job_id, project_id: job_project_id, &blk)
     end
 
     def job(job_id, project_id: @default_project_id)
data/lib/kura/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: kura
 version: !ruby/object:Gem::Version
-  version: 0.2.8
+  version: 0.2.9
 platform: ruby
 authors:
 - Chikanaga Tomoyuki
 autorequire:
 bindir: exe
 cert_chain: []
-date:
+date: 2016-01-08 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: google-api-client