bigquery-client 0.3.4 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: dcdf35c7bc17562e5cdd590bdbac3d6ae9df055d
- data.tar.gz: 206728dd2bd10c4bda55ce79857e73b6c6f9d85b
+ metadata.gz: e7f17a75e10d343f606ac5bcf6fdf6f4380c8400
+ data.tar.gz: 287c065f3fb531df155dd1748bd075771174644b
  SHA512:
- metadata.gz: 8a23eff48fd905f4de17ce835e2d7c7c3b85a8e31be65c23949b41640c846aa4061e1ecdb1b9661c83aba6e2e052323c41ac1117a39f87d1fa305b2bf676f6fd
- data.tar.gz: c944d45ce22b57b5566b2218aa85baa4ca03e5541d8b10f6bcb9d50d5171a7c54e8d4b2afe40e649067996194f82fe2d6cfa812c5617dba989635c6ab4f35660
+ metadata.gz: 6e14f2c77368f4849a7eb5603e8f649ff804307742d59d45e6a59c0c74b5e61297588631f7259522592702466a95d63017834e5cce65c2511e1c60b3a58fc27e
+ data.tar.gz: d11a77c7a18345800ba7e5b8d6ff661d62381e0ff58c77584c81271bd56bbccb5451a13ece01c2d4855d5b757bfb5382d64f15bac0e71403af87dd6ca6ce7259
data/.gitignore CHANGED
@@ -15,3 +15,4 @@
  *.o
  *.a
  mkmf.log
+ /vcr_cassettes
data/Gemfile CHANGED
@@ -10,6 +10,8 @@ end

  group :test do
  gem 'test-unit', '~> 3.0.0'
+ gem "webmock"
+ gem 'vcr'
  end

  gemspec
data/README.md CHANGED
@@ -54,7 +54,7 @@ https://cloud.google.com/bigquery/docs/reference/v2/
  | Datasets | delete | `delete_dataset` | :white_check_mark: |
  | Projects | list | `projects`, `list_projects` | :white_check_mark: |

- ## Usage
+ ## Basic Usage

  ```ruby
  # insert
@@ -74,26 +74,43 @@ schema = [
  ]
  client.create_table("new_table", schema)

- # SQL
+ # sql
  client.sql "SELECT * FROM your_dataset.your_table LIMIT 100"

- # SQL (public data)
- client.sql "SELECT * FROM publicdata:samples.wikipedia LIMIT 10"
+ # sql (public data)
+ client.sql "SELECT born_alive_alive,is_male,weight_pounds FROM publicdata:samples.natality LIMIT 3"
+ #=> [{"born_alive_alive"=>0, "is_male"=>true, "weight_pounds"=>8.437090766739999}, {"born_alive_alive"=>2, "is_male"=>true, "weight_pounds"=>6.8122838958}, {"born_alive_alive"=>4, "is_male"=>false, "weight_pounds"=>6.9996768185}]
+
+ # query
+ client.query "SELECT born_alive_alive,is_male,weight_pounds FROM publicdata:samples.natality LIMIT 3"
+ #=> #<struct BigQuery::ResultSet job_id="job_wNWRgrTUJKIi-IUFf9bIqe1mpU8", column_names=["born_alive_alive", "is_male", "weight_pounds"], column_types=["INTEGER", "BOOLEAN", "FLOAT"], records=[["0", "true", "8.437090766739999"], ["2", "true", "6.8122838958"], ["4", "false", "6.9996768185"]]>

  # tables
  client.tables
  #=> ["your_table", "your_table2", "your_table3"]

- # datasets
- client.datasets
- #=> ["your_dataset", "your_dataset2"]
-
  # fetch schema
  client.fetch_schema("your_table")
  #=> [{"name"=>"nickname", "type"=>"STRING"}, {"name"=>"age", "type"=>"INTEGER"}]

  # delete table
  client.delete_table('your_table')
+ ```
+
+ ## Datasets API
+
+ ```ruby
+ # No need to specify `:dataset`
+ client = BigQuery::Client.new(
+ project: "your-project-42",
+ email: "1234567890@developer.gserviceaccount.com",
+ private_key_path: "/path/to/keyfile.p12",
+ private_key_passphrase: "notasecret",
+ auth_method: "private_key"
+ )
+
+ client.datasets
+ #=> ["your_dataset", "your_dataset2"]

  # create dataset
  client.create_dataset('your_dataset')
@@ -102,6 +119,21 @@ client.create_dataset('your_dataset')
  client.delete_dataset('your_dataset')
  ```

+ ## Projects API
+
+ ```ruby
+ # No need to specify `:project` and `:dataset`
+ client = BigQuery::Client.new(
+ email: "1234567890@developer.gserviceaccount.com",
+ private_key_path: "/path/to/keyfile.p12",
+ private_key_passphrase: "notasecret",
+ auth_method: "private_key"
+ )
+
+ client.projects
+ #=> ["your_project"]
+ ```
+
  ## TODO

  - [ ] Support all API methods
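
The README additions above introduce `query` alongside `sql`: `query` returns the `BigQuery::ResultSet` struct added in `lib/bigquery-client/query_result.rb` (further down in this diff), while `sql` returns the same rows already converted to hashes. A minimal sketch of how the two relate, assuming a configured client and reusing the sample values shown in the README:

```ruby
result = client.query "SELECT born_alive_alive,is_male,weight_pounds FROM publicdata:samples.natality LIMIT 3"

result.column_names   #=> ["born_alive_alive", "is_male", "weight_pounds"]
result.column_types   #=> ["INTEGER", "BOOLEAN", "FLOAT"]
result.records.first  #=> ["0", "true", "8.437090766739999"]  (raw string values)

# ResultSet#to_a converts each value according to its column type;
# client.sql returns this converted form directly.
result.to_a.first     #=> {"born_alive_alive"=>0, "is_male"=>true, "weight_pounds"=>8.437090766739999}
```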
@@ -16,7 +16,7 @@ Gem::Specification.new do |spec|
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ['lib']

- spec.required_ruby_version = '>= 1.9.3'
+ spec.required_ruby_version = '>= 2.0.0'

  spec.add_runtime_dependency 'google-api-client', '~> 0.8.0'

@@ -0,0 +1,30 @@
+ #!/usr/bin/env ruby
+
+ require "bundler/setup"
+ require "bigquery-client"
+
+ require 'awesome_print'
+ require 'pry'
+ require 'pry-byebug'
+ require 'vcr'
+
+ require 'dotenv'
+ Dotenv.load
+
+ if defined?(PryByebug)
+ Pry.commands.alias_command 'c', 'continue'
+ Pry.commands.alias_command 's', 'step'
+ Pry.commands.alias_command 'n', 'next'
+ Pry.commands.alias_command 'f', 'finish'
+ end
+
+ client = BigQuery::Client.new(
+ project: ENV['BIGQUERY_PROJECT'],
+ email: ENV['BIGQUERY_EMAIL'],
+ private_key_path: ENV['BIGQUERY_PRIVATE_KEY_PATH'],
+ dataset: 'test_bigquery_client_default',
+ private_key_passphrase: 'notasecret',
+ auth_method: 'private_key'
+ )
+
+ Pry.start
@@ -1,3 +1,13 @@
+ require 'bigquery-client/attribute'
+ require 'bigquery-client/attribute/base'
+ require 'bigquery-client/attribute/boolean'
+ require 'bigquery-client/attribute/float'
+ require 'bigquery-client/attribute/integer'
+ require 'bigquery-client/attribute/string'
+ require 'bigquery-client/attribute/timestamp'
+ require 'bigquery-client/query_result'
+ require 'bigquery-client/run_query'
+ require 'bigquery-client/insert_rows'
  require 'bigquery-client/datasets'
  require 'bigquery-client/jobs'
  require 'bigquery-client/projects'
@@ -0,0 +1,15 @@
+ module BigQuery
+ module Attribute
+ class UnknownType < StandardError
+ end
+
+ def self.new(name: nil, type: nil, value: nil)
+ class_name = (type[0] || '').upcase + (type[1..-1] || '').downcase
+ if klass = BigQuery::Attribute.const_get(class_name)
+ klass.new(name, value)
+ else
+ fail UnknownType, "unknown type: #{type}"
+ end
+ end
+ end
+ end
@@ -0,0 +1,15 @@
+ module BigQuery
+ module Attribute
+ class Base
+ attr_reader :name, :type, :value
+
+ def initialize(name, value)
+ @name, @value = name, value
+ @type = self.class.name.split('::').last.downcase.to_sym
+ end
+
+ def parse
+ end
+ end
+ end
+ end
@@ -0,0 +1,13 @@
+ module BigQuery
+ module Attribute
+ class Boolean < Base
+ def parse
+ case @value
+ when nil then nil
+ when 'true' then true
+ when 'false' then false
+ end
+ end
+ end
+ end
+ end
@@ -0,0 +1,10 @@
+ module BigQuery
+ module Attribute
+ class Float < Base
+ def parse
+ return nil if @value.nil?
+ @value.to_f
+ end
+ end
+ end
+ end
@@ -0,0 +1,10 @@
+ module BigQuery
+ module Attribute
+ class Integer < Base
+ def parse
+ return nil if @value.nil?
+ @value.to_i
+ end
+ end
+ end
+ end
@@ -0,0 +1,9 @@
+ module BigQuery
+ module Attribute
+ class String < Base
+ def parse
+ @value
+ end
+ end
+ end
+ end
@@ -0,0 +1,10 @@
+ module BigQuery
+ module Attribute
+ class Timestamp < Base
+ def parse
+ return nil if @value.nil?
+ Time.at(@value.to_f)
+ end
+ end
+ end
+ end
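
The new `BigQuery::Attribute` factory above dispatches on the column type string and converts the raw values the API returns (always strings or nil) into Ruby objects. A small sketch of the conversions, based on the classes above and on the cases exercised in `test/test_attribute.rb` later in this diff; the field names and values here are only illustrative:

```ruby
require 'bigquery-client'

BigQuery::Attribute.new(name: 'age',      type: 'INTEGER',   value: '42').parse    #=> 42
BigQuery::Attribute.new(name: 'is_male',  type: 'BOOLEAN',   value: 'true').parse  #=> true
BigQuery::Attribute.new(name: 'weight',   type: 'FLOAT',     value: '6.81').parse  #=> 6.81
BigQuery::Attribute.new(name: 'nickname', type: 'STRING',    value: 'foo').parse   #=> "foo"
BigQuery::Attribute.new(name: 'time',     type: 'TIMESTAMP', value: '1444766743.0').parse
#=> 2015-10-13 20:05:43 (a Time built via Time.at; shown here in UTC)

# nil stays nil for every type
BigQuery::Attribute.new(name: 'age', type: 'INTEGER', value: nil).parse  #=> nil
```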
@@ -12,12 +12,12 @@ module BigQuery
  include BigQuery::Tabledata
  include BigQuery::Tables

+ private
+
  def initialize(attributes = {})
  attributes.each { |name, value| instance_variable_set("@#{name}", value) }
  end

- private
-
  def access_api(params = {})
  params[:parameters] ||= {}
  params[:parameters][:projectId] ||= @project
@@ -0,0 +1,23 @@
+ module BigQuery
+ module Tabledata
+ class InsertRows
+ class InsertError < StandardError
+ end
+
+ def initialize(client, table, arg)
+ @client, @table = client, table
+ @rows = arg.is_a?(Array) ? arg : [arg]
+ end
+
+ def call
+ result = @client.insert_all(@table, @rows)
+ handle_errors(result['insertErrors']) if result['insertErrors']
+ result
+ end
+
+ def handle_errors(errors)
+ fail InsertError, errors
+ end
+ end
+ end
+ end
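
`InsertRows` above wraps the streaming-insert response handling: a response carrying `insertErrors` now raises instead of being returned silently, which the new `test_insert_invalid_timestamp` test later in this diff relies on. A hedged sketch of the resulting behaviour (the table name and schema are illustrative):

```ruby
client.create_table('events', [{ name: 'time', type: 'timestamp' }])

# A well-formed row: the raw insertAll response is returned.
client.insert('events', time: '2015-10-13 20:05:43')
#=> {"kind"=>"bigquery#tableDataInsertAllResponse"}

# A row BigQuery rejects: the insertErrors in the response now raise.
client.insert('events', time: 'invalid_timestamp')
# raises BigQuery::Tabledata::InsertRows::InsertError
```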
@@ -3,21 +3,15 @@
  module BigQuery
  module Jobs
  def sql(query, options = {})
- jobs_query_response = jobs_query(query, options)
- fields = jobs_query_response['schema']['fields']
- names = fields.map {|field| field['name'] }
- types = fields.map {|field| field['type'] }
- records = extract_records(jobs_query_response)
- job_id = jobs_query_response['jobReference']['jobId']
- page_token = jobs_query_response['pageToken']
+ query(query, options).to_a
+ end

- while page_token
- query_results_response = query_results(job_id, { pageToken: page_token }.merge(options))
- records += extract_records(query_results_response)
- page_token = query_results_response['pageToken']
- end
+ def find_all(query, options = {})
+ raise NotImplementedError
+ end

- convert(records, types).map { |values| [names, values].transpose.to_h }
+ def query(query, options = {})
+ RunQuery.new(self, query, options).call
  end

  def jobs_query(query, options = {})
@@ -59,26 +53,5 @@ module BigQuery
  parameters: { jobId: id }.merge(options)
  )
  end
-
- private
-
- def extract_records(response)
- (response['rows'] || []).map {|row| row['f'].map {|record| record['v'] } }
- end
-
- def convert(records, types)
- records.map do |values|
- values.map.with_index do |value, index|
- case types[index]
- when 'INTEGER'
- value.to_i
- when 'BOOLEAN'
- value == 'true'
- else
- value
- end
- end
- end
- end
  end
  end
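
With this change `sql` becomes a thin wrapper over the new query path: it builds a `ResultSet` via `RunQuery` and converts it, while `find_all` is only reserved for now and raises `NotImplementedError`. Roughly, for any query string:

```ruby
client.sql("SELECT * FROM your_dataset.your_table LIMIT 100")
# is now equivalent to
client.query("SELECT * FROM your_dataset.your_table LIMIT 100").to_a
```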
@@ -0,0 +1,12 @@
+ module BigQuery
+ class ResultSet < Struct.new(:job_id, :column_names, :column_types, :records)
+ def to_a
+ records.map {|record|
+ values = record.map.with_index do |value, index|
+ Attribute.new(value: value, type: column_types[index]).parse
+ end
+ [column_names, values].transpose.to_h
+ }
+ end
+ end
+ end
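
`ResultSet` is a plain `Struct`, so its conversion logic can be seen without hitting the API. A self-contained sketch (the job id and rows are made up; `records` holds the raw string values as returned by BigQuery):

```ruby
require 'bigquery-client'

result = BigQuery::ResultSet.new(
  "job_123",                        # job_id (illustrative)
  %w(name age),                     # column_names
  %w(STRING INTEGER),               # column_types
  [["alice", "30"], ["bob", nil]]   # records
)

result.to_a
#=> [{"name"=>"alice", "age"=>30}, {"name"=>"bob", "age"=>nil}]
```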
@@ -0,0 +1,40 @@
+ module BigQuery
+ module Jobs
+ class RunQuery
+ def initialize(client, query, options)
+ @client, @query, @options = client, query, options
+ end
+
+ def call
+ @result = ResultSet.new
+ execute_query
+ fetch_pagenated_result
+ @result
+ end
+
+ private
+
+ def execute_query
+ response = @client.jobs_query(@query, @options)
+ @page_token = response['pageToken']
+ fields = response['schema']['fields']
+ @result.job_id = response['jobReference']['jobId']
+ @result.column_names = fields.map {|field| field['name'] }
+ @result.column_types = fields.map {|field| field['type'] }
+ @result.records = extract_records(response)
+ end
+
+ def fetch_pagenated_result
+ while @page_token
+ response = @client.query_results(@result.job_id, { pageToken: @page_token }.merge(@options))
+ @result.records += extract_records(response)
+ @page_token = response['pageToken']
+ end
+ end
+
+ def extract_records(response)
+ (response['rows'] || []).map {|row| row['f'].map {|record| record['v'] } }
+ end
+ end
+ end
+ end
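
`RunQuery` issues the initial `jobs_query` call and then keeps calling `query_results` with the returned `pageToken` until it runs out, so callers get the complete result regardless of page size; `maxResults` only sets the page size. The reworked pagination test below relies on exactly that, e.g.:

```ruby
# 123 rows fetched in pages of at most 100; the caller still sees all of them.
client.sql('SELECT title FROM publicdata:samples.wikipedia LIMIT 123', maxResults: 100).size
#=> 123
```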
@@ -3,7 +3,10 @@
  module BigQuery
  module Tabledata
  def insert(table, arg)
- rows = arg.is_a?(Array) ? arg : [arg]
+ InsertRows.new(self, table, arg).call
+ end
+
+ def insert_all(table, rows)
  access_api(
  api_method: bigquery.tabledata.insert_all,
  parameters: {
@@ -14,7 +17,6 @@ module BigQuery
  }
  )
  end
- alias_method :insert_all, :insert

  def list_tabledata(table)
  access_api(
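
In `tabledata.rb`, `insert` keeps its old signature (a single row hash or an array of rows) but now delegates to `InsertRows`, while `insert_all` becomes the low-level call that always takes an array and returns the raw API response; it is no longer just an alias of `insert`. Roughly, with table and rows as in the tests:

```ruby
client.insert('your_table', bar: 'foo')                        # one row; wrapped into an array, insertErrors raise
client.insert('your_table', [{ bar: 'foo' }, { bar: 'foo2' }]) # several rows at once
client.insert_all('your_table', [{ bar: 'foo' }])              # low-level call, response returned as-is
```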
@@ -1,5 +1,5 @@
  module BigQuery
  class Client
- VERSION = '0.3.4'
+ VERSION = '0.4.0'
  end
  end
@@ -5,6 +5,9 @@ require 'bigquery-client'
  require 'dotenv'
  require 'awesome_print'
  require 'pry-byebug'
+ require 'vcr'
+
+ Dotenv.load

  if defined?(PryByebug)
  Pry.commands.alias_command 'c', 'continue'
@@ -13,9 +16,13 @@ if defined?(PryByebug)
  Pry.commands.alias_command 'f', 'finish'
  end

- Dotenv.load
+ VCR.configure do |config|
+ config.cassette_library_dir = "vcr_cassettes"
+ config.hook_into :webmock
+ end

- $dataset = 'test_bigquery_client'
+ $prefix = 'test_bigquery_client'
+ $dataset = "#{$prefix}_default"

  $client = BigQuery::Client.new(
  project: ENV['BIGQUERY_PROJECT'],
@@ -26,10 +33,19 @@ $client = BigQuery::Client.new(
  auth_method: 'private_key'
  )

- $client.datasets.select { |dataset|
- Regexp.new($dataset) =~ dataset
- }.each { |dataset|
- $client.delete_dataset(dataset)
- }
+ VCR.use_cassette("helper") do
+ $client.datasets.each do |dataset|
+ $client.delete_dataset(dataset) if Regexp.new($prefix) =~ dataset
+ end
+ $client.create_dataset($dataset)
+ end
+
+ class ApiTest < Test::Unit::TestCase
+ def setup
+ VCR.insert_cassette("#{self.class}_#{@method_name}")
+ end

- $client.create_dataset($dataset)
+ def cleanup
+ VCR.eject_cassette
+ end
+ end
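
The rewritten test helper records HTTP traffic with VCR and WebMock: `ApiTest#setup` inserts a cassette named after the test class and method, and `cleanup` ejects it, so each test reads from or writes to its own file under `vcr_cassettes/` (newly git-ignored above). A sketch of what that means for a test case, assuming test-unit's `@method_name` holds the running test name as the helper does:

```ruby
class DatasetsTest < ApiTest
  def test_datasets
    # setup has already run, roughly:
    #   VCR.insert_cassette("DatasetsTest_test_datasets")
    # so HTTP calls here are recorded to / replayed from
    # vcr_cassettes/DatasetsTest_test_datasets.yml
    assert { $client.datasets.is_a?(Array) }
  end
end
```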
@@ -0,0 +1,66 @@
+ require 'helper'
+
+ class AttributeTest < Test::Unit::TestCase
+ def test_attribute
+ attribute = BigQuery::Attribute.new(name: 'is_admin', type: 'BOOLEAN', value: 'true')
+ assert { attribute.parse == true }
+ assert { attribute.name == 'is_admin' }
+ assert { attribute.value == 'true' }
+ assert { attribute.type == :boolean }
+ end
+
+ def test_string
+ attribute = BigQuery::Attribute::String.new(nil, 'foo')
+ assert { attribute.parse == 'foo' }
+ assert { attribute.type == :string }
+ end
+
+ def test_string_nil
+ attribute = BigQuery::Attribute::String.new(nil, nil)
+ assert { attribute.parse.nil? }
+ end
+
+ def test_integer
+ attribute = BigQuery::Attribute::Integer.new(nil, '42')
+ assert { attribute.parse == 42 }
+ assert { attribute.type == :integer }
+ end
+
+ def test_integer_nil
+ attribute = BigQuery::Attribute::Integer.new(nil, nil)
+ assert { attribute.parse.nil? }
+ end
+
+ def test_float
+ attribute = BigQuery::Attribute::Float.new(nil, '6.4992274837599995')
+ assert { attribute.parse == 6.4992274837599995 }
+ assert { attribute.type == :float }
+ end
+
+ def test_float_nil
+ attribute = BigQuery::Attribute::Float.new(nil, nil)
+ assert { attribute.parse.nil? }
+ end
+
+ def test_boolean
+ attribute = BigQuery::Attribute::Boolean.new(nil, 'false')
+ assert { attribute.parse == false }
+ assert { attribute.type == :boolean }
+ end
+
+ def test_boolean_nil
+ attribute = BigQuery::Attribute::Boolean.new(nil, nil)
+ assert { attribute.parse.nil? }
+ end
+
+ def test_timestamp
+ attribute = BigQuery::Attribute::Timestamp.new(nil, "-5.51952E7")
+ assert { attribute.parse == Time.new(1968, 4, 2, 13, 0, 0) }
+ assert { attribute.type == :timestamp }
+ end
+
+ def test_timestamp_nil
+ attribute = BigQuery::Attribute::Timestamp.new(nil, nil)
+ assert { attribute.parse.nil? }
+ end
+ end
@@ -1,6 +1,6 @@
  require 'helper'

- class ClientTest < Test::Unit::TestCase
+ class ClientTest < ApiTest
  def test_initialize
  actual = $client.instance_variables
  minimal = [:@project, :@dataset, :@email, :@private_key_path, :@private_key_passphrase, :@auth_method]
@@ -1,25 +1,31 @@
  require 'helper'

- class DatasetsTest < Test::Unit::TestCase
+ class DatasetsTest < ApiTest
  def test_datasets
- dataset_name = "#{$dataset}_test_datasets"
- $client.create_dataset(dataset_name)
- $client.datasets.include?(dataset_name)
+ dataset_name = "#{$prefix}_#{__method__}"
+ unless $client.datasets.include?(dataset_name)
+ $client.create_dataset(dataset_name)
+ end
+ assert { $client.datasets.include?(dataset_name) }
  end

  def test_create_dataset
- dataset_name = "#{$dataset}_create_dataset"
+ dataset_name = "#{$prefix}_#{__method__}"
+ if $client.datasets.include?(dataset_name)
+ $client.delete_dataset(dataset_name)
+ end
+ before = $client.datasets
  $client.create_dataset(dataset_name)
+ after = $client.datasets
+ assert { (after - before) == [dataset_name] }
  end

  def test_delete_dataset
- dataset_name = "#{$dataset}_delete_dataset"
+ dataset_name = "#{$prefix}_#{__method__}"
  $client.create_dataset(dataset_name)
  before = $client.datasets
  $client.delete_dataset(dataset_name)
  after = $client.datasets
- actual = before - after
- expected = [dataset_name]
- assert { actual == expected }
+ assert { (before - after) == [dataset_name] }
  end
  end
@@ -1,9 +1,9 @@
  require 'helper'

- class JobsTest < Test::Unit::TestCase
- @@normal_query = <<-"EOS"
+ class JobsTest < ApiTest
+ NORMAL_QUERY = <<-"EOS"
  SELECT
- born_alive_alive, mother_residence_state, is_male
+ born_alive_alive, mother_residence_state, is_male, weight_pounds
  FROM
  publicdata:samples.natality
  ORDER BY
@@ -12,41 +12,41 @@ class JobsTest < Test::Unit::TestCase
  100
  EOS

- @@no_rows_query = <<-"EOS"
- SELECT
- born_alive_alive, mother_residence_state, is_male
- FROM
- publicdata:samples.natality
- ORDER BY
- day
- LIMIT
- 0
- EOS
-
- @@huge_result_query = <<-"EOS"
- SELECT
- title
- FROM
- publicdata:samples.wikipedia
- LIMIT
- 1234567
- EOS
-
  def test_sql
- result = $client.sql(@@normal_query)
+ result = $client.sql NORMAL_QUERY
  assert { result.size == 100 }
  assert { result.sample["born_alive_alive"].is_a? Fixnum }
  assert { result.sample["mother_residence_state"].is_a? String }
+ assert { result.sample["weight_pounds"].is_a? Float }
  assert { result.first["is_male"] == true || result.first["is_male"] == false }
  end

+ def test_sql_timestamp_columns
+ table_name = 'test_timestamp'
+ $client.create_table(table_name, [{ name: 'time', type: 'timestamp' }])
+ $client.insert(table_name, time: "2015-10-13 20:05:43")
+ result = $client.sql('SELECT * FROM test_bigquery_client_default.test_timestamp LIMIT 1')
+ assert { result.last["time"] == Time.parse("2015-10-13 20:05:43 UTC") }
+ end
+
+ def test_query
+ result = $client.query NORMAL_QUERY
+ assert { result.column_names == %w(born_alive_alive mother_residence_state is_male weight_pounds) }
+ assert { result.column_types == %w(INTEGER STRING BOOLEAN FLOAT) }
+ records = result.records
+ assert { records.size == 100 }
+ assert { records.is_a?(Array) && records.sample.is_a?(Array) }
+ assert { records.sample.size == 4 }
+ assert { records.sample.all? {|value| value.is_a?(String) } }
+ end
+
  def test_sql_when_no_rows
- result = $client.sql(@@no_rows_query)
- assert { result == [] }
+ no_rows_query = 'SELECT * FROM publicdata:samples.natality LIMIT 0'
+ assert { $client.sql(no_rows_query) == [] }
  end

  def test_sql_pagination
- record_size = $client.sql(@@huge_result_query, maxResults: 10000).size
- assert { record_size == 1234567 }
+ pagination_query = 'SELECT title FROM publicdata:samples.wikipedia LIMIT 123'
+ assert { $client.sql(pagination_query, maxResults: 100).size == 123 }
  end
  end
@@ -1,15 +1,7 @@
  require 'helper'

- class ProjectsTest < Test::Unit::TestCase
+ class ProjectsTest < ApiTest
  def test_projects
- actual = $client.projects.first
- expected = ENV['BIGQUERY_PROJECT']
- assert { actual == expected }
- end
-
- def test_list_projects
- actual = $client.list_projects['projects'].first['id']
- expected = ENV['BIGQUERY_PROJECT']
- assert { actual == expected }
+ assert { $client.projects.include? ENV['BIGQUERY_PROJECT'] }
  end
  end
@@ -1,12 +1,20 @@
  require 'helper'

- class TablesTest < Test::Unit::TestCase
+ class TabledataTest < ApiTest
  def test_insert
  table_name = __method__.to_s
  schema = [{ name: 'bar', type: 'string' }]
  $client.create_table(table_name, schema)
- result = $client.insert(table_name, bar: "foo")
- assert { result['kind'] == 'bigquery#tableDataInsertAllResponse' }
+ assert { $client.insert(table_name, bar: "foo") == { 'kind' => 'bigquery#tableDataInsertAllResponse' } }
+ end
+
+ def test_insert_invalid_timestamp
+ table_name = __method__.to_s
+ schema = [{ name: 'time', type: 'timestamp' }]
+ $client.create_table(table_name, schema)
+ assert_raise(BigQuery::Tabledata::InsertRows::InsertError) do
+ $client.insert(table_name, time: "invalid_timestamp")
+ end
  end

  def test_insert_with_array
@@ -15,8 +23,7 @@ class TablesTest < Test::Unit::TestCase
  $client.create_table(table_name, schema)
  result = $client.list_tabledata(table_name)
  rows = [{ bar: "foo" }, { bar: "foo2" }, { bar: "foo3" }]
- result = $client.insert(table_name, rows)
- assert { result['kind'] == 'bigquery#tableDataInsertAllResponse' }
+ assert { $client.insert(table_name, rows) == { 'kind' => 'bigquery#tableDataInsertAllResponse' } }
  end

  def test_list_tabledata
@@ -1,6 +1,6 @@
  require 'helper'

- class TablesTest < Test::Unit::TestCase
+ class TablesTest < ApiTest
  def test_tables
  table_name = __method__.to_s
  schema = [{ name: 'bar', type: 'string' }]
@@ -8,22 +8,11 @@ class TablesTest < Test::Unit::TestCase
  assert { $client.tables.include?(table_name) }
  end

- def test_table_pagination
- $client.tables.each {|t| $client.delete_table(t) }
- 3.times do |i|
- table_name = __method__.to_s + "_#{i.to_s}"
- schema = [{ name: 'bar', type: 'string' }]
- $client.create_table(table_name, schema)
- end
- assert { $client.tables(maxResults: 1).count == 3 }
- end
-
  def test_fetch_schema
  table_name = __method__.to_s
  schema = [{ name: 'bar', type: 'string' }]
  $client.create_table(table_name, schema)
- result = $client.fetch_schema(table_name)
- assert { result == [{"name"=>"bar", "type"=>"STRING"}] }
+ assert { $client.fetch_schema(table_name) == [{"name"=>"bar", "type"=>"STRING"}] }
  end

  def test_create_table
@@ -31,10 +20,8 @@ class TablesTest < Test::Unit::TestCase
  schema = [{ name: 'bar', type: 'string' }]
  before = $client.tables
  $client.create_table(table_name, schema)
- after = $client.tables
- actual = after - before
- expected = [table_name]
- assert { expected == actual }
+ after = $client.tables
+ assert { (after - before) == [table_name] }
  end

  def test_delete_table
@@ -44,8 +31,16 @@ class TablesTest < Test::Unit::TestCase
  before = $client.tables
  $client.delete_table(table_name)
  after = $client.tables
- actual = before - after
- expected = [table_name]
- assert { actual == expected }
+ assert { (before - after) == [table_name] }
+ end
+
+ def test_table_pagination
+ $client.tables.each {|t| $client.delete_table(t) }
+ 3.times do |i|
+ table_name = __method__.to_s + "_#{i.to_s}"
+ schema = [{ name: 'bar', type: 'string' }]
+ $client.create_table(table_name, schema)
+ end
+ assert { $client.tables(maxResults: 1).count == 3 }
  end
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: bigquery-client
  version: !ruby/object:Gem::Version
- version: 0.3.4
+ version: 0.4.0
  platform: ruby
  authors:
  - Tsukuru Tanimichi
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2015-06-10 00:00:00.000000000 Z
+ date: 2015-10-30 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: google-api-client
@@ -55,7 +55,8 @@ dependencies:
  description:
  email:
  - ttanimichi@hotmail.com
- executables: []
+ executables:
+ - console
  extensions: []
  extra_rdoc_files: []
  files:
@@ -66,16 +67,28 @@ files:
  - README.md
  - Rakefile
  - bigquery-client.gemspec
+ - bin/console
  - lib/bigquery-client.rb
+ - lib/bigquery-client/attribute.rb
+ - lib/bigquery-client/attribute/base.rb
+ - lib/bigquery-client/attribute/boolean.rb
+ - lib/bigquery-client/attribute/float.rb
+ - lib/bigquery-client/attribute/integer.rb
+ - lib/bigquery-client/attribute/string.rb
+ - lib/bigquery-client/attribute/timestamp.rb
  - lib/bigquery-client/client.rb
  - lib/bigquery-client/datasets.rb
  - lib/bigquery-client/errors.rb
+ - lib/bigquery-client/insert_rows.rb
  - lib/bigquery-client/jobs.rb
  - lib/bigquery-client/projects.rb
+ - lib/bigquery-client/query_result.rb
+ - lib/bigquery-client/run_query.rb
  - lib/bigquery-client/tabledata.rb
  - lib/bigquery-client/tables.rb
  - lib/bigquery-client/version.rb
  - test/helper.rb
+ - test/test_attribute.rb
  - test/test_client.rb
  - test/test_datasets.rb
  - test/test_jobs.rb
@@ -94,7 +107,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 1.9.3
+ version: 2.0.0
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -102,12 +115,13 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.4.5
+ rubygems_version: 2.4.5.1
  signing_key:
  specification_version: 4
  summary: A Ruby interface to the BigQuery API.
  test_files:
  - test/helper.rb
+ - test/test_attribute.rb
  - test/test_client.rb
  - test/test_datasets.rb
  - test/test_jobs.rb