kura 0.4.3 → 0.6.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 2055945c222e4a141f646ffa69974eb97cd1ce26c5f7bf1910bb57044c573764
- data.tar.gz: b7118e38c616f8fc26c454230586647932b92a68661aece9c38a2bd915b7f430
+ metadata.gz: 2ea4ebb00dbb6f6c467dff7f4573d3627f433a4f02b55ba8b3d6b13c64c29508
+ data.tar.gz: a26d9f4a2f0eb6fc60d7b4371cdfff674161bdc8da0ef695d55c680a1069be0f
  SHA512:
- metadata.gz: 51b8ebfccc735b923c577be770282e7af95ead82502ada51d228f541aa918f27fd8d2ae5b586f5ca55aaaf5e5a309b06a57f2c22b691e2806f102e1088faa0ce
- data.tar.gz: a0ec2909b094a50f3dbbb268e34ed9a78c030cfcd48c61c5c29c5699b9a9aac98579872dfc09050cfb12e92f9e29a4bd8a0ca7863548414c2ef8b0c800fcfb32
+ metadata.gz: 3a0a5aa8fc5ee6d559f1e2b47be2bdde98726ac278a023238aaaa690e922ed5876d5ba732c3814db03074234123364a77ceb86b5d7097bdf6bcd8e88292b78ca
+ data.tar.gz: 8503ea77b5e22e76f84eebe4391eb0e3757e11596c34ff53c2a450ca35d86995efd78b8017170f231a23de64100b000dfccf57a49738cf27bb9fd974d149f84d
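The digests above are the SHA256/SHA512 checksums of the `metadata.gz` and `data.tar.gz` entries inside the `.gem` package; they can be reproduced locally. A minimal sketch, assuming `kura-0.6.2.gem` has already been fetched into the working directory (e.g. via `gem fetch kura -v 0.6.2`):

```ruby
# Recompute the SHA256 digests recorded in checksums.yaml for a downloaded gem.
# The filename is an assumption: whatever `gem fetch kura -v 0.6.2` produced.
require "digest"
require "rubygems/package"

File.open("kura-0.6.2.gem", "rb") do |io|
  # A .gem file is a plain tar archive containing metadata.gz and data.tar.gz.
  Gem::Package::TarReader.new(io).each do |entry|
    next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
    puts "#{entry.full_name}: #{Digest::SHA256.hexdigest(entry.read)}"
  end
end
```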
data/ChangeLog.md CHANGED
@@ -1,3 +1,39 @@
+ # 0.6.2
+
+ ## Enhancements
+
+ * Support Ruby 3.0.
+ * Support range partitioning and time partitioning parameters in the load job configuration.
+
+ # 0.6.1
+
+ ## Enhancements
+
+ * The `job` method now accepts a `fields` keyword argument.
+
+ # 0.6.0
+
+ ## Changes
+
+ * Replace the runtime dependency "google-api-client" with "google-apis-bigquery_v2".
+   See https://github.com/groovenauts/gcs-ruby/pull/2 and https://github.com/googleapis/google-api-ruby-client/blob/master/google-api-client/OVERVIEW.md for more details.
+
+ # 0.5.0
+
+ ## Changes
+
+ * `Kura::Client#list_tabledata` now returns TIMESTAMP values as ISO 8601 formatted Strings.
+
+ ## Enhancements
+
+ * Accept a description field in the schema specification when inserting and loading tables.
+
+ # 0.4.4
+
+ ## Enhancements
+
+ * Support the [Routines API](https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/).
+
  # 0.4.3
 
  ## Fixes
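Taken together, the 0.5.x–0.6.x entries above change how a caller drives load jobs, job lookups, and tabledata reads. A minimal caller-side sketch — client construction follows the gem's README-style `Kura.client`, and the dataset, table, bucket, and column names are assumptions, not values from this release:

```ruby
require "kura"

# Authenticated client; constructor usage per the gem README (an assumption here).
client = Kura.client("service_account_credential.json")

# 0.6.2: range/time partitioning hashes are forwarded to the load job
# configuration and converted into the corresponding BigQuery API objects.
job = client.load("my_dataset", "events", "gs://my-bucket/events-*.csv",
                  schema: [
                    { name: "ts",    type: "TIMESTAMP", description: "event time" }, # 0.5.0: description accepted
                    { name: "value", type: "INTEGER" },
                  ],
                  time_partitioning: { type: "DAY", field: "ts" },
                  wait: 300)

# 0.6.1: `fields` is forwarded to jobs.get for a partial response.
client.job(job.job_reference.job_id, fields: "status,statistics")

# 0.5.0: TIMESTAMP columns now come back as ISO 8601 strings.
client.list_tabledata("my_dataset", "events")
```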
data/kura.gemspec CHANGED
@@ -21,7 +21,7 @@ Gem::Specification.new do |spec|
 
  spec.required_ruby_version = '>= 2.1'
 
- spec.add_runtime_dependency "google-api-client", [">= 0.28.6", "!= 0.29.1"]
+ spec.add_runtime_dependency "google-apis-bigquery_v2"
 
  spec.add_development_dependency "bundler"
  spec.add_development_dependency "rake"
data/lib/kura/client.rb CHANGED
@@ -1,5 +1,6 @@
  # coding: utf-8
 
+ require "time"
  require "json"
  require "google/apis/bigquery_v2"
  require "googleauth"
@@ -219,7 +220,7 @@ module Kura
  view: view,
  external_data_configuration: external_data_configuration)
  if time_partitioning
- table.time_partitioning = Google::Apis::BigqueryV2::TimePartitioning.new(time_partitioning)
+ table.time_partitioning = Google::Apis::BigqueryV2::TimePartitioning.new(**time_partitioning)
  end
  @api.insert_table(project_id, dataset_id, table, &blk)
  rescue
@@ -268,6 +269,8 @@ module Kura
  end
  when "BOOLEAN"
  x.to_s == "true"
+ when "TIMESTAMP"
+ Time.at(Float(x)).utc.iso8601(6)
  when "RECORD"
  _convert_tabledata_row(x, field_info["fields"])
  else
@@ -409,16 +412,16 @@ module Kura
  wait: nil,
  dry_run: false,
  &blk)
- configuration = Google::Apis::BigqueryV2::JobConfiguration.new({
- query: Google::Apis::BigqueryV2::JobConfigurationQuery.new({
+ configuration = Google::Apis::BigqueryV2::JobConfiguration.new(
+ query: Google::Apis::BigqueryV2::JobConfigurationQuery.new(
  query: sql,
  allow_large_results: normalize_parameter(allow_large_results),
  flatten_results: normalize_parameter(flatten_results),
  priority: priority,
  use_query_cache: normalize_parameter(use_query_cache),
  use_legacy_sql: use_legacy_sql,
- })
- })
+ )
+ )
  if mode
  configuration.query.write_disposition = mode_to_write_disposition(mode)
  end
@@ -433,15 +436,15 @@ module Kura
  configuration.query.maximum_bytes_billed = maximum_bytes_billed
  end
  if dataset_id and table_id
- configuration.query.destination_table = Google::Apis::BigqueryV2::TableReference.new({ project_id: project_id, dataset_id: dataset_id, table_id: table_id })
+ configuration.query.destination_table = Google::Apis::BigqueryV2::TableReference.new(project_id: project_id, dataset_id: dataset_id, table_id: table_id)
  end
  if user_defined_function_resources
  configuration.query.user_defined_function_resources = Array(user_defined_function_resources).map do |r|
  r = r.to_s
  if r.start_with?("gs://")
- Google::Apis::BigqueryV2::UserDefinedFunctionResource.new({ resource_uri: r })
+ Google::Apis::BigqueryV2::UserDefinedFunctionResource.new(resource_uri: r)
  else
- Google::Apis::BigqueryV2::UserDefinedFunctionResource.new({ inline_code: r })
+ Google::Apis::BigqueryV2::UserDefinedFunctionResource.new(inline_code: r)
  end
  end
  end
@@ -459,6 +462,9 @@ module Kura
  type: (s[:type] || s["type"]),
  mode: (s[:mode] || s["mode"]),
  }
+ if (desc = (s[:description] || s["description"]))
+ f[:description] = desc
+ end
  if (sub_fields = (s[:fields] || s["fields"]))
  f[:fields] = normalize_schema(sub_fields)
  end
@@ -468,6 +474,9 @@ module Kura
  type: s.type,
  mode: s.mode,
  }
+ if s.respond_to?(:description)
+ f[:description] = s.description
+ end
  if (sub_fields = s.fields)
  f[:fields] = normalize_schema(sub_fields)
  end
@@ -485,6 +494,8 @@ module Kura
  quote: '"', skip_leading_rows: 0,
  source_format: "CSV",
  autodetect: false,
+ range_partitioning: nil,
+ time_partitioning: nil,
  project_id: @default_project_id,
  job_project_id: @default_project_id,
  job_id: nil,
@@ -493,26 +504,36 @@ module Kura
  &blk)
  write_disposition = mode_to_write_disposition(mode)
  source_uris = [source_uris] if source_uris.is_a?(String)
- configuration = Google::Apis::BigqueryV2::JobConfiguration.new({
- load: Google::Apis::BigqueryV2::JobConfigurationLoad.new({
- destination_table: Google::Apis::BigqueryV2::TableReference.new({
+ case range_partitioning
+ when Hash
+ range_partitioning = Google::Apis::BigqueryV2::RangePartitioning.new(**range_partitioning)
+ end
+ case time_partitioning
+ when Hash
+ time_partitioning = Google::Apis::BigqueryV2::TimePartitioning.new(**time_partitioning)
+ end
+ configuration = Google::Apis::BigqueryV2::JobConfiguration.new(
+ load: Google::Apis::BigqueryV2::JobConfigurationLoad.new(
+ destination_table: Google::Apis::BigqueryV2::TableReference.new(
  project_id: project_id,
  dataset_id: dataset_id,
  table_id: table_id,
- }),
+ ),
  write_disposition: write_disposition,
  allow_jagged_rows: normalize_parameter(allow_jagged_rows),
  max_bad_records: max_bad_records,
  ignore_unknown_values: normalize_parameter(ignore_unknown_values),
  source_format: source_format,
- })
- })
+ range_partitioning: range_partitioning,
+ time_partitioning: time_partitioning,
+ )
+ )
  if dry_run
  configuration.dry_run = true
  wait = nil
  end
  if schema
- configuration.load.schema = Google::Apis::BigqueryV2::TableSchema.new({ fields: normalize_schema(schema) })
+ configuration.load.schema = Google::Apis::BigqueryV2::TableSchema.new(fields: normalize_schema(schema))
  end
  if source_format == "CSV"
  configuration.load.field_delimiter = field_delimiter
@@ -541,18 +562,18 @@ module Kura
  dry_run: false,
  &blk)
  dest_uris = [ dest_uris ] if dest_uris.is_a?(String)
- configuration = Google::Apis::BigqueryV2::JobConfiguration.new({
- extract: Google::Apis::BigqueryV2::JobConfigurationExtract.new({
+ configuration = Google::Apis::BigqueryV2::JobConfiguration.new(
+ extract: Google::Apis::BigqueryV2::JobConfigurationExtract.new(
  compression: compression,
  destination_format: destination_format,
- source_table: Google::Apis::BigqueryV2::TableReference.new({
+ source_table: Google::Apis::BigqueryV2::TableReference.new(
  project_id: project_id,
  dataset_id: dataset_id,
  table_id: table_id,
- }),
+ ),
  destination_uris: dest_uris,
- })
- })
+ )
+ )
  if dry_run
  configuration.dry_run = true
  wait = nil
@@ -574,21 +595,21 @@ module Kura
  dry_run: false,
  &blk)
  write_disposition = mode_to_write_disposition(mode)
- configuration = Google::Apis::BigqueryV2::JobConfiguration.new({
- copy: Google::Apis::BigqueryV2::JobConfigurationTableCopy.new({
- destination_table: Google::Apis::BigqueryV2::TableReference.new({
+ configuration = Google::Apis::BigqueryV2::JobConfiguration.new(
+ copy: Google::Apis::BigqueryV2::JobConfigurationTableCopy.new(
+ destination_table: Google::Apis::BigqueryV2::TableReference.new(
  project_id: dest_project_id,
  dataset_id: dest_dataset_id,
  table_id: dest_table_id,
- }),
- source_table: Google::Apis::BigqueryV2::TableReference.new({
+ ),
+ source_table: Google::Apis::BigqueryV2::TableReference.new(
  project_id: src_project_id,
  dataset_id: src_dataset_id,
  table_id: src_table_id,
- }),
+ ),
  write_disposition: write_disposition,
- })
- })
+ )
+ )
  if dry_run
  configuration.dry_run = true
  wait = nil
@@ -617,14 +638,14 @@ module Kura
  state_filter: state_filter)
  end
 
- def job(job_id, location: nil, project_id: @default_project_id, &blk)
+ def job(job_id, location: nil, project_id: @default_project_id, fields: nil, &blk)
  if blk
- @api.get_job(project_id, job_id, location: location) do |j, e|
+ @api.get_job(project_id, job_id, location: location, fields: fields) do |j, e|
  j.kura_api = self if j
  blk.call(j, e)
  end
  else
- @api.get_job(project_id, job_id, location: location).tap{|j| j.kura_api = self if j }
+ @api.get_job(project_id, job_id, location: location, fields: fields).tap{|j| j.kura_api = self if j }
  end
  rescue
  process_error($!)
@@ -737,5 +758,72 @@ module Kura
  return nil if $!.respond_to?(:status_code) and $!.status_code == 404
  process_error($!)
  end
+
+ # Routines API
+ def routines(dataset_id, project_id: @default_project_id, limit: 1000, page_token: nil, &blk)
+ if blk
+ @api.list_routines(project_id, dataset_id, max_results: limit, page_token: page_token) do |result, err|
+ result &&= (result.routines || [])
+ blk.call(result, err)
+ end
+ else
+ @api.list_routines(project_id, dataset_id, max_results: limit, page_token: page_token)
+ end
+ rescue
+ process_error($!)
+ end
+
+ def routine(dataset_id, routine_id, project_id: @default_project_id, &blk)
+ if blk
+ @api.get_routine(project_id, dataset_id, routine_id) do |result, err|
+ if err.respond_to?(:status_code) and err.status_code == 404
+ result = nil
+ err = nil
+ end
+ blk.call(result, err)
+ end
+ else
+ @api.get_routine(project_id, dataset_id, routine_id)
+ end
+ rescue
+ return nil if $!.respond_to?(:status_code) and $!.status_code == 404
+ process_error($!)
+ end
+
+ def delete_routine(dataset_id, routine_id, project_id: @default_project_id, &blk)
+ @api.delete_routine(project_id, dataset_id, routine_id, &blk)
+ rescue
+ return nil if $!.respond_to?(:status_code) and $!.status_code == 404
+ process_error($!)
+ end
+
+ def insert_routine(dataset_id,
+ routine_id,
+ body,
+ project_id: @default_project_id,
+ routine_type: "PROCEDURE",
+ language: "SQL",
+ arguments: [],
+ return_type: nil,
+ imported_libraries: [],
+ description: nil)
+ @api.insert_routine(
+ project_id,
+ dataset_id,
+ Google::Apis::BigqueryV2::Routine.new(
+ routine_reference: Google::Apis::BigqueryV2::RoutineReference.new(
+ project_id: project_id,
+ dataset_id: dataset_id,
+ routine_id: routine_id
+ ),
+ arguments: arguments,
+ definition_body: body,
+ imported_libraries: imported_libraries,
+ language: language,
+ return_type: return_type,
+ routine_type: routine_type,
+ description: description
+ ))
+ end
  end
  end
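The Routines API wrappers added at the end of the diff above map onto calls like the following. A sketch only: the dataset, routine name, and SQL body are hypothetical, and `client` is assumed to be an authenticated `Kura::Client`.

```ruby
# Create a stored procedure, read it back, list routines, and delete it.
# Dataset, routine name, and SQL body are made up for illustration.
client.insert_routine("my_dataset", "cleanup_events",
                      "DELETE FROM my_dataset.events WHERE ts < TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL 90 DAY)",
                      routine_type: "PROCEDURE",
                      language: "SQL",
                      description: "drop events older than 90 days")

client.routine("my_dataset", "cleanup_events")        # => Routine, or nil if it does not exist
client.routines("my_dataset", limit: 100)             # without a block: the raw list response; with a block: yields the routines array
client.delete_routine("my_dataset", "cleanup_events") # => nil if it was already gone
```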
data/lib/kura/version.rb CHANGED
@@ -1,3 +1,3 @@
  module Kura
- VERSION = "0.4.3"
+ VERSION = "0.6.2"
  end
metadata CHANGED
@@ -1,35 +1,29 @@
  --- !ruby/object:Gem::Specification
  name: kura
  version: !ruby/object:Gem::Version
- version: 0.4.3
+ version: 0.6.2
  platform: ruby
  authors:
  - Chikanaga Tomoyuki
- autorequire:
+ autorequire:
  bindir: exe
  cert_chain: []
- date: 2020-01-28 00:00:00.000000000 Z
+ date: 2021-07-09 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
- name: google-api-client
+ name: google-apis-bigquery_v2
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 0.28.6
- - - "!="
- - !ruby/object:Gem::Version
- version: 0.29.1
+ version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 0.28.6
- - - "!="
- - !ruby/object:Gem::Version
- version: 0.29.1
+ version: '0'
  - !ruby/object:Gem::Dependency
  name: bundler
  requirement: !ruby/object:Gem::Requirement
@@ -139,7 +133,7 @@ homepage: https://github.com/nagachika/kura/
  licenses:
  - MIT
  metadata: {}
- post_install_message:
+ post_install_message:
  rdoc_options: []
  require_paths:
  - lib
@@ -154,8 +148,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.1.2
- signing_key:
+ rubygems_version: 3.2.15
+ signing_key:
  specification_version: 4
  summary: Interface to BigQuery API v2.
  test_files: []