bigquery 0.8.3 → 0.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: f86dbbfbeb3a4a08ac71e39181408a2b526a09f8
4
- data.tar.gz: 6e28318195e7650bb8097db3aff0e54f2575d017
3
+ metadata.gz: 8e1bed7457e42ea19c9ac5d8c665969fe091032d
4
+ data.tar.gz: 8783d467491ae84d1079eddf269362d12929a9e2
5
5
  SHA512:
6
- metadata.gz: 05ca7432301a8f1b7b074dd93fd0daca5672b02d93771c50e466fb266b435417193a1f63c4be47af1bb9e6a06ea4dd6e61924d3799f9c250be73e4d5d8e84294
7
- data.tar.gz: 5e1e5f48a3308c5cbf8f08dcf9b93cc454b1036c75e26d15e3358c42143724e591e6c5f289c67611332d021d277326818cab427b6ff12db9b34e1bd9289a1e59
6
+ metadata.gz: 41310cad0e3ec7fdc3efc72ecbdd6cc4906a02d4da3dc5cd31c852dc5d81d10dd119b2f2f23e013e4d44b3fb2c2a7cac3db79b10245941d7bdf6b6138bbc539c
7
+ data.tar.gz: f4d80225dba14d8a518f628ab68f3d6f06a8964d52deda801346dcc6582063432f425b3a2e967fe77599bb62c1fe79d3ea3f46e192c1b727846da985421c1b7c
@@ -1,13 +1,13 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- bigquery (0.8.3)
4
+ bigquery (0.9.0)
5
5
  google-api-client (= 0.8.6)
6
6
 
7
7
  GEM
8
8
  remote: https://rubygems.org/
9
9
  specs:
10
- activesupport (4.2.3)
10
+ activesupport (4.2.5)
11
11
  i18n (~> 0.7)
12
12
  json (~> 1.7, >= 1.7.7)
13
13
  minitest (~> 5.1)
data/README.md CHANGED
@@ -14,6 +14,7 @@ Only service accounts are supported right now. https://developers.google.com/acc
14
14
 
15
15
  * query
16
16
  * tables
17
+ * datasets
17
18
  * load
18
19
  * tables_formatted
19
20
  * job
@@ -76,6 +77,24 @@ Describe table schema
76
77
 
77
78
  bq.describe_table('table_name')
78
79
 
80
+ ## Datasets
81
+
82
+ List datasets in the project
83
+
84
+ bq.datasets
85
+
86
+ List dataset names
87
+
88
+ bq.datasets_formatted
89
+
90
+ Delete an existing dataset
91
+
92
+ bq.delete_dataset('test123')
93
+
94
+ Create dataset. First param is the dataset name
95
+
96
+ bq.create_dataset('test123')
97
+
79
98
  ## Querying
80
99
 
81
100
  bq.query("SELECT * FROM [#{config['dataset']}.table_name] LIMIT 1")
@@ -85,12 +104,28 @@ Describe table schema
85
104
  Insert a single row
86
105
 
87
106
  bq.insert('table_name', 'id' => 123, 'type' => 'Task')
88
-
107
+
89
108
  Batch insert an array of rows. [See bigquery docs for limitations.](https://cloud.google.com/bigquery/streaming-data-into-bigquery#quota)
90
109
 
91
110
  data = [{'id' => 123, 'type' => 'Foo'}, {'id' => 321, 'type' => 'Bar'}]
92
111
  bq.insert('table_name', data)
93
112
 
113
+ # Patching
114
+
115
+ Patching an existing table
116
+
117
+ bq.patch_table('test', 'id' => 123, 'type' => 'Task', 'name' => 'Task1')
118
+
119
+ Tables: patch [See bigquery docs for details.](https://cloud.google.com/bigquery/docs/reference/v2/tables/patch)
120
+
121
+ # Updating
122
+
123
+ Updating an existing table
124
+
125
+ bq.update_table('test', 'id' => 123, 'type' => 'Task', 'name' => 'Task1')
126
+
127
+ Tables: update [See bigquery docs for details.](https://cloud.google.com/bigquery/docs/reference/v2/tables/update)
128
+
94
129
  ## Keys
95
130
 
96
131
  To get the keys you need to have a:
@@ -2,6 +2,7 @@ require 'big_query/client/errors'
2
2
  require 'big_query/client/query'
3
3
  require 'big_query/client/jobs'
4
4
  require 'big_query/client/tables'
5
+ require 'big_query/client/datasets'
5
6
  require 'big_query/client/load'
6
7
 
7
8
  module BigQuery
@@ -10,6 +11,7 @@ module BigQuery
10
11
  include BigQuery::Client::Query
11
12
  include BigQuery::Client::Jobs
12
13
  include BigQuery::Client::Tables
14
+ include BigQuery::Client::Datasets
13
15
  include BigQuery::Client::Insert
14
16
 
15
17
  attr_accessor :dataset, :project_id
@@ -0,0 +1,52 @@
1
+ module BigQuery
2
+ class Client
3
+ module Datasets
4
+
5
+ # Lists the datasets
6
+ #
7
+ # @return [Array<Hash>] dataset resources from the json api response
8
+ def datasets
9
+ response = api({
10
+ :api_method => @bq.datasets.list,
11
+ })
12
+
13
+ response['datasets'] || []
14
+ end
15
+
16
+ # Lists the datasets, returning only the datasetId of each
17
+ #
18
+ # @return [Array<String>] dataset ids
19
+ def datasets_formatted
20
+ datasets.map { |t| t['datasetReference']['datasetId'] }
21
+ end
22
+
23
+ # Creating a new dataset
24
+ #
25
+ # @param datasetId [String] dataset id to create
26
+ # @return [Hash] json api response
27
+ #
28
+ # examples:
29
+ #
30
+ # @bq.create_dataset('new_dataset')
31
+ def create_dataset(datasetId)
32
+ api(
33
+ api_method: @bq.datasets.insert,
34
+ body_object: { "datasetReference" => {
35
+ "datasetId" => datasetId,
36
+ "projectId" => @project_id,
37
+ }
38
+ }
39
+ )
40
+ end
41
+
42
+ # Deletes the given datasetId
43
+ #
44
+ # @param datasetId [String] dataset id to delete
45
+ def delete_dataset(datasetId)
46
+ api(api_method: @bq.datasets.delete,
47
+ parameters: { 'datasetId' => datasetId }
48
+ )
49
+ end
50
+ end
51
+ end
52
+ end
@@ -95,6 +95,60 @@ module BigQuery
95
95
  )
96
96
  end
97
97
 
98
+ # Patching an existing table
99
+ #
100
+ # @param tableId [String] table id to patch
101
+ # @param schema [Hash] name => opts hash for the schema
102
+ #
103
+ # examples:
104
+ #
105
+ # @bq.patch_table('existing_table', id: { type: 'INTEGER', mode: 'required' }, price: { type: 'FLOAT' })
106
+ # The entire schema must be provided, including any fields already present in the existing schema
107
+ # Otherwise 'BigQuery::Errors::BigQueryError: Provided Schema does not match Table' occurs
108
+ def patch_table(tableId, schema={})
109
+ api(
110
+ api_method: @bq.tables.patch,
111
+ parameters: { 'tableId' => tableId,
112
+ 'datasetId' => @dataset },
113
+ body_object: { 'tableReference' => {
114
+ 'tableId' => tableId,
115
+ 'projectId' => @project_id,
116
+ 'datasetId' => @dataset
117
+ },
118
+ 'schema' => {
119
+ 'fields' => validate_schema(schema)
120
+ }
121
+ }
122
+ )
123
+ end
124
+
125
+ # Updating an existing table
126
+ #
127
+ # @param tableId [String] table id to update
128
+ # @param schema [Hash] name => opts hash for the schema
129
+ #
130
+ # examples:
131
+ #
132
+ # @bq.update_table('existing_table', id: { type: 'INTEGER', mode: 'required' }, price: { type: 'FLOAT' })
133
+ # The entire schema must be provided, including any fields already present in the existing schema
134
+ # Otherwise 'BigQuery::Errors::BigQueryError: Provided Schema does not match Table' occurs
135
+ def update_table(tableId, schema={})
136
+ api(
137
+ api_method: @bq.tables.update,
138
+ parameters: { 'tableId' => tableId,
139
+ 'datasetId' => @dataset },
140
+ body_object: { 'tableReference' => {
141
+ 'tableId' => tableId,
142
+ 'projectId' => @project_id,
143
+ 'datasetId' => @dataset
144
+ },
145
+ 'schema' => {
146
+ 'fields' => validate_schema(schema)
147
+ }
148
+ }
149
+ )
150
+ end
151
+
98
152
  # Describe the schema of the given tableId
99
153
  #
100
154
  # @param tableId [String] table id to describe
@@ -1,3 +1,3 @@
1
1
  module BigQuery
2
- VERSION = '0.8.3'
2
+ VERSION = '0.9.0'
3
3
  end
@@ -98,6 +98,57 @@ class BigQueryTest < MiniTest::Unit::TestCase
98
98
  refute_includes tables, 'test123'
99
99
  end
100
100
 
101
+ def test_for_patch_table
102
+ schema = {
103
+ id: { type: 'INTEGER', mode: 'REQUIRED' },
104
+ type: { type: 'STRING', mode: 'NULLABLE' },
105
+ date: { type: 'TIMESTAMP' },
106
+ city: {
107
+ name: 'city',
108
+ type: 'RECORD',
109
+ mode: 'nullable',
110
+ fields: {
111
+ id: { name: 'id', type: 'INTEGER' }
112
+ }
113
+ }
114
+ }
115
+
116
+ result = @bq.patch_table('test', schema)
117
+
118
+ assert_equal result['kind'], "bigquery#table"
119
+ assert_equal result['tableReference']['tableId'], "test"
120
+ assert_equal result['schema']['fields'], [
121
+ { 'name' => 'id', 'type' => 'INTEGER', 'mode' => 'REQUIRED' },
122
+ { 'name' => 'type', 'type' => 'STRING', 'mode' => 'NULLABLE' },
123
+ { 'name' => 'date', 'type' => 'TIMESTAMP' },
124
+ {
125
+ 'name' => 'city',
126
+ 'type' => 'RECORD',
127
+ 'fields' => [
128
+ { 'name' => 'id', 'type' => 'INTEGER' },
129
+ ]
130
+ }
131
+ ]
132
+ end
133
+
134
+ def test_for_update_table
135
+ schema = {
136
+ id: { type: 'INTEGER', mode: 'REQUIRED' },
137
+ type: { type: 'STRING', mode: 'NULLABLE' },
138
+ name: { type: 'STRING' }
139
+ }
140
+
141
+ result = @bq.update_table('test', schema)
142
+
143
+ assert_equal result['kind'], "bigquery#table"
144
+ assert_equal result['tableReference']['tableId'], "test"
145
+ assert_equal result['schema']['fields'], [
146
+ { 'name' => 'id', 'type' => 'INTEGER', 'mode' => 'REQUIRED' },
147
+ { 'name' => 'type', 'type' => 'STRING', 'mode' => 'NULLABLE' },
148
+ { 'name' => 'name', 'type' => 'STRING' }
149
+ ]
150
+ end
151
+
101
152
  def test_for_describe_table
102
153
  result = @bq.describe_table('test')
103
154
 
@@ -155,4 +206,40 @@ class BigQueryTest < MiniTest::Unit::TestCase
155
206
 
156
207
  assert_equal result['kind'], "bigquery#job"
157
208
  end
209
+
210
+ def test_for_datasets
211
+ dataset = @bq.datasets.select{|t| t['id'] == "#{config['project_id']}:#{config['dataset']}"}.first
212
+
213
+ assert_equal dataset['kind'], "bigquery#dataset"
214
+ assert_equal dataset['datasetReference']['datasetId'], config['dataset']
215
+ end
216
+
217
+ def test_for_datasets_formatted
218
+ result = @bq.datasets_formatted
219
+
220
+ assert_includes result, config['dataset']
221
+ end
222
+
223
+ def test_for_create_datasets
224
+ if @bq.datasets_formatted.include? 'test123'
225
+ @bq.delete_dataset('test123')
226
+ end
227
+
228
+ result = @bq.create_dataset('test123')
229
+
230
+ assert_equal result['kind'], "bigquery#dataset"
231
+ assert_equal result['datasetReference']['datasetId'], 'test123'
232
+ end
233
+
234
+ def test_for_delete_datasets
235
+ if !@bq.datasets_formatted.include? 'test123'
236
+ @bq.create_dataset('test123')
237
+ end
238
+
239
+ result = @bq.delete_dataset('test123')
240
+
241
+ datasets = @bq.datasets_formatted
242
+
243
+ refute_includes datasets, 'test123'
244
+ end
158
245
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: bigquery
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.8.3
4
+ version: 0.9.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Adam Bronte
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2015-07-14 00:00:00.000000000 Z
12
+ date: 2016-01-25 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: google-api-client
@@ -100,6 +100,7 @@ files:
100
100
  - bigquery.gemspec
101
101
  - lib/big_query.rb
102
102
  - lib/big_query/client.rb
103
+ - lib/big_query/client/datasets.rb
103
104
  - lib/big_query/client/errors.rb
104
105
  - lib/big_query/client/jobs.rb
105
106
  - lib/big_query/client/load.rb