big_shift 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/README.md +127 -0
- data/lib/big_shift/commands/base_command.rb +63 -0
- data/lib/big_shift/commands/base_table_command.rb +13 -0
- data/lib/big_shift/commands/create_table_command.rb +26 -0
- data/lib/big_shift/commands/get_access_token_command.rb +46 -0
- data/lib/big_shift/commands/insert_rows_command.rb +27 -0
- data/lib/big_shift/core.rb +13 -0
- data/lib/big_shift/models/access_token.rb +15 -0
- data/lib/big_shift/models/base_model.rb +7 -0
- data/lib/big_shift/models/schema.rb +21 -0
- data/lib/big_shift/models/table_field.rb +15 -0
- data/lib/big_shift/responses/create_table_response.rb +4 -0
- data/lib/big_shift/responses/get_access_token_response.rb +7 -0
- data/lib/big_shift/responses/insert_rows_response.rb +9 -0
- data/lib/big_shift/services/access_token_service.rb +14 -0
- data/lib/big_shift.rb +30 -0
- data/license/gplv3.md +650 -0
- data/license/lgplv3.md +171 -0
- data/license/lgplv3.png +0 -0
- metadata +193 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: 8dc1bd209d69121511dbf0b3efffceb8c38c45e9
+  data.tar.gz: f05ef9279b88a0e8d630005a7ee541bd53e57d09
+SHA512:
+  metadata.gz: b132ceb754777e66bd3a824a230014a3f0eae8047f506d73796549b904adb15f700ab474a2f181bc08cc436c9da2f8d67920c28a67576716d7b69014671f37df
+  data.tar.gz: 943089b7da61fd19178b5273e7fe71f73eccd6cef09301a880ad5a063a47b3ab890d5094c49af93455b6331ec7c4a9c8e80c7dc1366b3e73c70f8fb3888a8cf3
data/README.md
ADDED
@@ -0,0 +1,127 @@
+BigShift
+======
+
+BigShift uploads your data to BigQuery.
+
+
+Setup
+-----
+
+    $ bundle install
+
+
+Run Specs
+---------
+
+    $ bundle exec rake
+
+### Regenerating VCR Cassettes
+
+#### BigQuery Workspace
+
+A BigQuery dataset is necessary to regenerate cassettes.
+The dataset may be named anything,
+but a table named `vcr_insert_table` is required
+and must have a field named `field` of type `string`.
+
+If the cassettes have been regenerated against an existing dataset,
+the table named `vcr_create_table` will need to be deleted
+prior to generating VCR cassettes again.
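One way to set up that `vcr_insert_table` fixture is with the gem's own API; a minimal sketch, assuming the environment variables described below are already configured:

```
# Hypothetical one-off setup script for the VCR fixture table.
require 'big_shift'

schema = BigShift::Schema.new('vcr_insert_table')
  .add_field('field', :string)

BigShift.create_table schema
```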
+
+
+#### Environment Variables
+
+In order to regenerate VCR cassettes,
+some environment variables are required
+and should be added to a `.env` file in the root of the project:
+
+```
+BIG_SHIFT_PROJECT_ID = [project-id]
+BIG_SHIFT_DATASET_ID = [dataset-id]
+BIG_SHIFT_REFRESH_TOKEN = [refresh-token]
+BIG_SHIFT_GOOGLE_CLIENT_ID = [google-client-id]
+BIG_SHIFT_GOOGLE_CLIENT_SECRET = [google-client-secret]
+```
+
+
+#### Find AwesomenessTv Variables
+
+##### Project ID, Dataset ID
+
+Look on the BigQuery dashboard or talk to a team member.
+
+
+##### Client ID and Client Secret
+
+1. Go to [the Google API Console][google-api-console].
+2. Log in as `awesomenesstv.dev@gmail.com` (see a team member for the password).
+3. Select the `Business Intelligence` project (if not already there).
+4. Click on `Credentials` in the left nav (under `APIs and auth`).
+5. Client ID and Client Secret are listed under `Client ID for web application`.
+
+
+##### Refresh Token (requires Client ID & Client Secret)
+
+1. Sign in to Google as `awesomenesstv.dev@gmail.com`.
+2. Go to [the OAuth playground][oauth-playground].
+3. Select the BigQuery APIs (`bigquery` and `bigquery.insertdata` scopes).
+4. Use the gear/options dropdown to set the Client ID and Client Secret.
+5. Click on `Authorize APIs`.
+6. Look for the `Refresh token` value that is generated.
+
+
+Usage
+-----
+
+All environment variables listed in the section on regenerating VCR cassettes
+must be set by any consumer of this gem.
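For example, a consumer could set them before requiring the gem; a minimal sketch with placeholder values (configure them however your application manages secrets):

```
# Placeholder values -- set these from your own configuration or a .env file.
ENV['BIG_SHIFT_PROJECT_ID']           = 'my-project'
ENV['BIG_SHIFT_DATASET_ID']           = 'my_dataset'
ENV['BIG_SHIFT_REFRESH_TOKEN']        = 'refresh-token'
ENV['BIG_SHIFT_GOOGLE_CLIENT_ID']     = 'google-client-id'
ENV['BIG_SHIFT_GOOGLE_CLIENT_SECRET'] = 'google-client-secret'

require 'big_shift'
```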
+
+
+Interface
+---------
+
+All public endpoints are exposed in BigShift::Core.
+
+Every response from the public API is wrapped in a `Response` object
+that will always have the same interface regardless of request.
+The `Response#data` attribute will be an object specific to the data requested.
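For illustration, a consumer might use it like this (a sketch; only the `#data` attribute is documented here, so treat the exact contents as request-specific):

```
response = BigShift.insert_rows 'MyTable', rows
response.data # an object specific to the insert_rows request
```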
+
+
+### create_table
+
+This method requires only a schema to be specified:
+
+```
+schema = BigShift::Schema.new('MyTable')
+  .add_field('field1', :string)
+  .add_field('field2', :integer)
+  .add_field('field3', :boolean)
+
+BigShift.create_table schema
+```
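For orientation, `CreateTableCommand` (included later in this diff) turns that schema into a request body for the BigQuery `tables` endpoint roughly like this (a sketch of the hash the command builds; you do not construct it yourself):

```
{
  'tableReference' => {
    'projectId' => ENV['BIG_SHIFT_PROJECT_ID'],
    'datasetId' => ENV['BIG_SHIFT_DATASET_ID'],
    'tableId'   => 'MyTable',
  },
  'schema' => {
    'fields' => [
      { :name => 'field1', :type => :string },
      { :name => 'field2', :type => :integer },
      { :name => 'field3', :type => :boolean },
    ],
  },
}
```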
+
+
+### insert_rows
+
+This method requires a table name and an array of objects to insert.
+
+All objects are expected to respond to `to_json`.
+
+```
+rows = [{
+  :field1 => 'field1-value-1',
+  :field2 => 'field2-value-1',
+}, {
+  :field1 => 'field1-value-2',
+  :field2 => 'field2-value-2',
+}, {
+  :field1 => 'field1-value-3',
+  :field2 => 'field2-value-3',
+}]
+
+BigShift.insert_rows 'MyTable', rows
+```
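For reference, `InsertRowsCommand` (later in this diff) posts these rows to BigQuery's `insertAll` endpoint, wrapping each row as `{ :json => row }`; for the first two rows above the body is roughly:

```
{ :rows => [
  { :json => { :field1 => 'field1-value-1', :field2 => 'field2-value-1' } },
  { :json => { :field1 => 'field1-value-2', :field2 => 'field2-value-2' } },
] }
```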
+
+
+[google-api-console]: https://code.google.com/apis/console
+[oauth-playground]: https://developers.google.com/oauthplayground
data/lib/big_shift/commands/base_command.rb
ADDED
@@ -0,0 +1,63 @@
+module BigShift
+  class BaseCommand
+    class << self
+      private
+
+      def connection
+        Faraday.new(:url => base_url)
+      end
+
+      def post
+        connection.post url, request_body, headers
+      end
+
+      def headers
+        {
+          'Authorization' => "Bearer #{access_token}",
+          'Content-Type' => 'application/json',
+        }
+      end
+
+      def params
+        url_params.merge({
+          :key => access_token,
+        })
+      end
+
+      def request_body
+        body.to_json
+      end
+
+      def base_url
+        'https://www.googleapis.com/bigquery/v2/projects/%s/datasets/%s' % [
+          project_id,
+          dataset_id,
+        ]
+      end
+
+      def url
+        connection.build_url endpoint, params
+      end
+
+      def url_params
+        {}
+      end
+
+      def body
+        {}
+      end
+
+      def project_id
+        ENV['BIG_SHIFT_PROJECT_ID']
+      end
+
+      def dataset_id
+        ENV['BIG_SHIFT_DATASET_ID']
+      end
+
+      def access_token
+        AccessTokenService.retrieve_token
+      end
+    end
+  end
+end
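The concrete commands in this release layer on top of this class by defining `execute`, `endpoint`, and `body` (and overriding `headers`, `params`, or `base_url` where needed). A hypothetical subclass, purely for illustration and not part of the gem, would look like:

```
module BigShift
  # Hypothetical example only -- not shipped in this gem.
  class ExampleCommand < BaseCommand
    class << self
      def execute
        post # inherited private helper: POSTs `body` as JSON to the built URL
      end

      def endpoint
        'example' # appended to the dataset-scoped base_url
      end

      def body
        { 'example' => true } # serialized by the inherited request_body
      end
    end
  end
end
```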
data/lib/big_shift/commands/create_table_command.rb
ADDED
@@ -0,0 +1,26 @@
+module BigShift
+  class CreateTableCommand < BaseCommand
+    class << self
+      def execute(schema)
+        @schema = schema
+        CreateTableResponse.new post
+      end
+
+      def endpoint
+        'tables'
+      end
+
+      def body
+        {
+          'tableReference' => {
+            'projectId' => project_id,
+            'datasetId' => dataset_id,
+            'tableId' => @schema.table_name,
+          },
+
+          'schema' => @schema,
+        }
+      end
+    end
+  end
+end
data/lib/big_shift/commands/get_access_token_command.rb
ADDED
@@ -0,0 +1,46 @@
+module BigShift
+  class GetAccessTokenCommand < BaseCommand
+    class << self
+      def execute
+        GetAccessTokenResponse.new post
+      end
+
+      def headers
+        {
+          'Content-Type' => 'application/json',
+        }
+      end
+
+      def params
+        {
+          :client_id => client_id,
+          :client_secret => client_secret,
+          :refresh_token => refresh_token,
+          :grant_type => 'refresh_token',
+        }
+      end
+
+      def endpoint
+        'token'
+      end
+
+      private
+
+      def client_id
+        ENV['BIG_SHIFT_GOOGLE_CLIENT_ID']
+      end
+
+      def client_secret
+        ENV['BIG_SHIFT_GOOGLE_CLIENT_SECRET']
+      end
+
+      def refresh_token
+        ENV['BIG_SHIFT_REFRESH_TOKEN']
+      end
+
+      def base_url
+        'https://www.googleapis.com/oauth2/v3'
+      end
+    end
+  end
+end
data/lib/big_shift/commands/insert_rows_command.rb
ADDED
@@ -0,0 +1,27 @@
+module BigShift
+  class InsertRowsCommand < BaseTableCommand
+    class << self
+      def execute(table_name, rows)
+        @rows = rows
+        self.table_id = table_name
+        InsertRowsResponse.new post
+      end
+
+      def endpoint
+        'insertAll'
+      end
+
+      def body
+        { :rows => build_rows }
+      end
+
+      private
+
+      def build_rows
+        @rows.map do |row|
+          {:json => row}
+        end
+      end
+    end
+  end
+end
data/lib/big_shift/models/access_token.rb
ADDED
@@ -0,0 +1,15 @@
+module BigShift
+  class AccessToken
+    attr_accessor :access_token, :expires
+
+    def initialize(hash = {})
+      @access_token = hash['access_token']
+      @expires = hash['expires_in']
+      @created = Time.now
+    end
+
+    def expired?
+      Time.now - @created > (expires - 10)
+    end
+  end
+end
data/lib/big_shift/models/schema.rb
ADDED
@@ -0,0 +1,21 @@
+module BigShift
+  class Schema < BaseModel
+    attr_reader :table_name
+
+    def initialize(table_name)
+      @table_name = table_name
+      @fields = []
+    end
+
+    def add_field(field_name, field_type)
+      @fields << TableField.new(field_name, field_type)
+      self
+    end
+
+    def as_json(json_state = nil)
+      {
+        'fields' => @fields,
+      }
+    end
+  end
+end
data/lib/big_shift/models/table_field.rb
ADDED
@@ -0,0 +1,15 @@
+module BigShift
+  class TableField < BaseModel
+    def initialize(field_name, field_type)
+      @field_name = field_name
+      @field_type = field_type
+    end
+
+    def as_json(json_state = nil)
+      {
+        :name => @field_name,
+        :type => @field_type,
+      }
+    end
+  end
+end
data/lib/big_shift.rb
ADDED
@@ -0,0 +1,30 @@
+require 'json'
+
+require 'faraday'
+require 'reverb'
+
+module BigShift
+end
+
+require_relative 'big_shift/models/base_model'
+require_relative 'big_shift/models/access_token'
+require_relative 'big_shift/models/table_field'
+require_relative 'big_shift/models/schema'
+
+require_relative 'big_shift/responses/get_access_token_response'
+require_relative 'big_shift/responses/create_table_response'
+require_relative 'big_shift/responses/insert_rows_response'
+
+require_relative 'big_shift/commands/base_command'
+require_relative 'big_shift/commands/base_table_command'
+require_relative 'big_shift/commands/create_table_command'
+require_relative 'big_shift/commands/get_access_token_command'
+require_relative 'big_shift/commands/insert_rows_command'
+
+require_relative 'big_shift/services/access_token_service'
+
+require_relative 'big_shift/core'
+
+module BigShift
+  extend Core
+end